def run(config, makefiles):
    """Build and execute the epiPALEOMIX pipeline.

    Parameters:
        config    -- parsed configuration object (temp_root, dry_run,
                     max_threads, progress_ui, list_* flags, ...).
        makefiles -- makefiles describing the analyses to run.

    Returns 0 on success (including the pure listing modes), 1 if the
    pipeline run fails.

    Raises RuntimeError if the makefiles yield no analyses at all.
    """
    check_path(config.temp_root)

    # '%%02i' survives strftime as '%02i'; the logger expands it to pick a
    # free log-file index.
    logfile_template = time.strftime("epiPALEOMIX_pipe.%Y%m%d_%H%M%S_%%02i.log")
    pypeline.logger.initialize(config, logfile_template)
    logger = logging.getLogger(__name__)

    pipeline = Pypeline(config=config)
    topnodes = create_nodes(config, makefiles)
    # Explicit check instead of 'assert': asserts are stripped under -O,
    # which would silently let an empty pipeline through.
    if not topnodes:
        raise RuntimeError("No analyses to run. Check %s" % (makefiles,))
    pipeline.add_nodes(topnodes)

    if config.list_output_files:
        logger.info("Printing output files ...")
        pipeline.print_output_files()
        return 0
    elif config.list_executables:
        logger.info("Printing required executables ...")
        pipeline.print_required_executables()
        return 0

    logger.info("Running Epipaleomix pipeline ...")
    if not pipeline.run(dry_run=config.dry_run,
                        max_running=config.max_threads,
                        progress_ui=config.progress_ui):
        return 1
    return 0
# Fragment of a phylo_pipeline entry point: parse makefiles, set up logging,
# build the node graph for each requested command, then handle the pure
# listing modes.  The enclosing 'def' lies outside this chunk.
try:
    makefiles = read_makefiles(config, args, commands)
except (MakefileError, pypeline.yaml.YAMLError, IOError), error:
    # Report parse/IO problems and bail out with a non-zero exit code.
    print_err("Error reading makefiles:",
              "\n %s:\n " % (error.__class__.__name__,),
              "\n ".join(str(error).split("\n")),
              file=sys.stderr)
    return 1

# '%%02i' survives strftime as '%02i' for the logger to fill in.
logfile_template = time.strftime("phylo_pipeline.%Y%m%d_%H%M%S_%%02i.log")
pypeline.logger.initialize(config, logfile_template)
logger = logging.getLogger(__name__)

# NOTE(review): 'pipeline' is used below but never created in this fragment;
# presumably constructed earlier in the enclosing function (a sibling variant
# does 'pipeline = Pypeline(config)') — verify.
for (command_key, command_func) in commands:
    logger.info("Building %s pipeline ...", command_key)
    # Each command builder may attach nodes directly to the pipeline and/or
    # record them in the makefile under "Nodes".
    command_func(pipeline, config, makefiles)

for makefile in makefiles:
    if "Nodes" in makefile:
        pipeline.add_nodes(makefile["Nodes"])

if config.list_output_files:
    logger.info("Printing output files ...")
    pipeline.print_output_files()
    return 0
elif config.list_orphan_files:
    logger.info("Printing orphan files ...")
    for filename in sorted(list_orphan_files(config, makefiles, pipeline)):
        print(filename)
    return 0
# Fragment of a bam_pipeline entry point.  It begins mid-way through a
# print_err(...) call inside an 'except' clause whose 'try'/'except' header
# lies outside this chunk.
        "Error reading makefiles:",
        "\n %s:\n " % (error.__class__.__name__,),
        "\n ".join(str(error).split("\n")),
        file=sys.stderr,
    )
    return 1

# '%%02i' survives strftime as '%02i' for the logger to fill in.
logfile_template = time.strftime("bam_pipeline.%Y%m%d_%H%M%S_%%02i.log")
pypeline.logger.initialize(config, logfile_template)
logger = logging.getLogger(__name__)

# Build .fai files for reference .fasta files
index_references(config, makefiles)

if config.list_targets:
    logger.info("Listing targets for %s ...", config.list_targets)
    for makefile in makefiles:
        # If a destination is not specified, save results in same folder as
        # the makefile
        filename = makefile["Statistics"]["Filename"]
        old_destination = config.destination
        if old_destination is None:
            config.destination = os.path.dirname(filename)
        list_targets_for(config, makefile, config.list_targets)
        # Restore so later makefiles see the caller-supplied destination.
        config.destination = old_destination
    return 0

# Default to the trimming-only pipeline; switch to the full target pipeline
# when targets were requested.
pipeline_func = build_pipeline_trimming
if config.targets:
    pipeline_func = build_pipeline_targets
# Fragment of a phylo_pipeline entry point: parse makefiles, set up logging,
# construct the Pypeline, build per-command node graphs, then handle the
# pure listing modes.  The enclosing 'def' lies outside this chunk.
try:
    makefiles = read_makefiles(config, args, commands)
except (MakefileError, pypeline.yaml.YAMLError, IOError), error:
    # Report parse/IO problems and bail out with a non-zero exit code.
    print_err("Error reading makefiles:",
              "\n %s:\n " % (error.__class__.__name__, ),
              "\n ".join(str(error).split("\n")),
              file=sys.stderr)
    return 1

# '%%02i' survives strftime as '%02i' for the logger to fill in.
logfile_template = time.strftime("phylo_pipeline.%Y%m%d_%H%M%S_%%02i.log")
pypeline.logger.initialize(config, logfile_template)
logger = logging.getLogger(__name__)

pipeline = Pypeline(config)
for (command_key, command_func) in commands:
    logger.info("Building %s pipeline ...", command_key)
    # Each command builder may attach nodes directly to the pipeline and/or
    # record them in the makefile under "Nodes".
    command_func(pipeline, config, makefiles)

for makefile in makefiles:
    if "Nodes" in makefile:
        pipeline.add_nodes(makefile["Nodes"])

if config.list_output_files:
    logger.info("Printing output files ...")
    pipeline.print_output_files()
    return 0
elif config.list_orphan_files:
    logger.info("Printing orphan files ...")
    for filename in sorted(list_orphan_files(config, makefiles, pipeline)):
        print(filename)
    return 0
# Fragment of a phylo_pipeline entry point (near-duplicate of a sibling
# chunk): parse makefiles, set up logging, build per-command node graphs,
# then handle the pure listing modes.  The enclosing 'def' lies outside
# this chunk.
try:
    makefiles = read_makefiles(config, args, commands)
except (MakefileError, pypeline.yaml.YAMLError, IOError), error:
    # Report parse/IO problems and bail out with a non-zero exit code.
    print_err("Error reading makefiles:",
              "\n %s:\n " % (error.__class__.__name__, ),
              "\n ".join(str(error).split("\n")),
              file=sys.stderr)
    return 1

# '%%02i' survives strftime as '%02i' for the logger to fill in.
logfile_template = time.strftime("phylo_pipeline.%Y%m%d_%H%M%S_%%02i.log")
pypeline.logger.initialize(config, logfile_template)
logger = logging.getLogger(__name__)

# NOTE(review): 'pipeline' is used below but never created in this fragment;
# presumably constructed earlier in the enclosing function (a sibling variant
# does 'pipeline = Pypeline(config)') — verify.
for (command_key, command_func) in commands:
    logger.info("Building %s pipeline ...", command_key)
    command_func(pipeline, config, makefiles)

for makefile in makefiles:
    if "Nodes" in makefile:
        pipeline.add_nodes(makefile["Nodes"])

if config.list_output_files:
    logger.info("Printing output files ...")
    pipeline.print_output_files()
    return 0
elif config.list_orphan_files:
    logger.info("Printing orphan files ...")
    for filename in sorted(list_orphan_files(config, makefiles, pipeline)):
        print(filename)
    return 0
# Fragment of a bam_pipeline entry point.  It begins at the 'except' clause
# of a 'try' whose opening lies outside this chunk (presumably wrapping a
# read_makefiles(...) call — verify).
except (MakefileError, pypeline.yaml.YAMLError, IOError), error:
    # Report parse/IO problems and bail out with a non-zero exit code.
    print_err("Error reading makefiles:",
              "\n %s:\n " % (error.__class__.__name__,),
              "\n ".join(str(error).split("\n")),
              file=sys.stderr)
    return 1

# '%%02i' survives strftime as '%02i' for the logger to fill in.
logfile_template = time.strftime("bam_pipeline.%Y%m%d_%H%M%S_%%02i.log")
pypeline.logger.initialize(config, logfile_template)
logger = logging.getLogger(__name__)

# Build .fai files for reference .fasta files
index_references(config, makefiles)

if config.list_targets:
    logger.info("Listing targets for %s ...", config.list_targets)
    for makefile in makefiles:
        # If a destination is not specified, save results in same folder as
        # the makefile
        filename = makefile["Statistics"]["Filename"]
        old_destination = config.destination
        if old_destination is None:
            config.destination = os.path.dirname(filename)
        list_targets_for(config, makefile, config.list_targets)
        # Restore so later makefiles see the caller-supplied destination.
        config.destination = old_destination
    return 0

# Default to the trimming-only pipeline; switch to the full target pipeline
# when targets were requested.
pipeline_func = build_pipeline_trimming
if config.targets:
    pipeline_func = build_pipeline_targets
# Fragment of a bam_pipeline entry point (near-duplicate of a sibling
# chunk).  It begins at the 'except' clause of a 'try' whose opening lies
# outside this chunk (presumably wrapping read_makefiles(...) — verify).
except (MakefileError, pypeline.yaml.YAMLError, IOError), error:
    # Report parse/IO problems and bail out with a non-zero exit code.
    print_err("Error reading makefiles:",
              "\n %s:\n " % (error.__class__.__name__, ),
              "\n ".join(str(error).split("\n")),
              file=sys.stderr)
    return 1

# '%%02i' survives strftime as '%02i' for the logger to fill in.
logfile_template = time.strftime("bam_pipeline.%Y%m%d_%H%M%S_%%02i.log")
pypeline.logger.initialize(config, logfile_template)
logger = logging.getLogger(__name__)

# Build .fai files for reference .fasta files
index_references(config, makefiles)

if config.list_targets:
    logger.info("Listing targets for %s ...", config.list_targets)
    for makefile in makefiles:
        # If a destination is not specified, save results in same folder as
        # the makefile
        filename = makefile["Statistics"]["Filename"]
        old_destination = config.destination
        if old_destination is None:
            config.destination = os.path.dirname(filename)
        list_targets_for(config, makefile, config.list_targets)
        # Restore so later makefiles see the caller-supplied destination.
        config.destination = old_destination
    return 0

# Default to the trimming-only pipeline; switch to the full target pipeline
# when targets were requested.
pipeline_func = build_pipeline_trimming
if config.targets:
    pipeline_func = build_pipeline_targets
# Fragment of a phylo_pipeline entry point (near-duplicate of a sibling
# chunk): parse makefiles, set up logging, construct the Pypeline, build
# per-command node graphs, then handle the pure listing modes.  The
# enclosing 'def' lies outside this chunk.
try:
    makefiles = read_makefiles(config, args, commands)
except (MakefileError, pypeline.yaml.YAMLError, IOError), error:
    # Report parse/IO problems and bail out with a non-zero exit code.
    print_err("Error reading makefiles:",
              "\n %s:\n " % (error.__class__.__name__,),
              "\n ".join(str(error).split("\n")),
              file=sys.stderr)
    return 1

# '%%02i' survives strftime as '%02i' for the logger to fill in.
logfile_template = time.strftime("phylo_pipeline.%Y%m%d_%H%M%S_%%02i.log")
pypeline.logger.initialize(config, logfile_template)
logger = logging.getLogger(__name__)

pipeline = Pypeline(config)
for (command_key, command_func) in commands:
    logger.info("Building %s pipeline ...", command_key)
    # Each command builder may attach nodes directly to the pipeline and/or
    # record them in the makefile under "Nodes".
    command_func(pipeline, config, makefiles)

for makefile in makefiles:
    if "Nodes" in makefile:
        pipeline.add_nodes(makefile["Nodes"])

if config.list_output_files:
    logger.info("Printing output files ...")
    pipeline.print_output_files()
    return 0
elif config.list_orphan_files:
    logger.info("Printing orphan files ...")
    for filename in sorted(list_orphan_files(config, makefiles, pipeline)):
        print(filename)
    return 0