# Earlier, serial version: generate each per-partition consensus script and
# run it immediately in place.
#
# NOTE (assumption): the flattened source did not show this module's imports.
# 'run_cmd' and 'support' are taken to be the usual FALCON-pipeline helpers
# (pbcommand's subprocess wrapper and falcon_kit's run_support module); verify
# against the surrounding module.
import json
import os
import sys

from pbcommand.engine import run_cmd            # run_cmd(cmd, stdout_fh, stderr_fh, shell=...)
from falcon_kit import run_support as support   # provides run_consensus(**kwargs)


def _run_consensus_jobs(tasks):
    fns = list()
    for p_id, (cons_args, fasta_bfn) in tasks.items():
        run_dir = 'preads'
        job_done = "c_%05d_done" % p_id
        script_fn = os.path.join(run_dir, "c_%05d.sh" % p_id)
        cons_args['job_done'] = job_done
        cons_args['script_fn'] = script_fn
        # Write the per-partition shell script, then execute it right away.
        support.run_consensus(**cons_args)
        run_cmd('bash %s' % script_fn, sys.stdout, sys.stderr, shell=False)
        fns.append(os.path.join(run_dir, fasta_bfn))
    return fns  # *.fasta
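# For context, a minimal usage sketch (an assumption, not part of the original
# source): 'tasks' maps a partition id to a (cons_args, fasta_bfn) pair, with
# cons_args carrying the keys listed in the extended version's comment below.
# 'config' here is a hypothetical placeholder for the pipeline's real
# configuration object.
config = {}
tasks = {}
for p_id in range(1, 4):
    fasta_bfn = 'out.%05d.fasta' % p_id
    cons_args = {
        'out_file_fn': os.path.abspath(os.path.join('preads', fasta_bfn)),
        'raw_reads': 'raw_reads',
        'config': config,
    }
    # _run_consensus_jobs() fills in 'script_fn' and 'job_done' itself.
    tasks[p_id] = (cons_args, fasta_bfn)

fasta_fns = _run_consensus_jobs(tasks)
# -> ['preads/out.00001.fasta', 'preads/out.00002.fasta', 'preads/out.00003.fasta']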
# Extended version: optionally skip execution (dry_run) and record each job's
# script name and working directory in a JSON manifest for scattered tasks.
#
# Minimal stand-ins (assumptions) for two module helpers the flattened source
# referenced but did not define:
import contextlib


def mkdir(d):
    # Idempotent directory creation.
    if not os.path.isdir(d):
        os.makedirs(d)


@contextlib.contextmanager
def cd(newdir):
    # Temporarily chdir into newdir, restoring the old cwd on exit.
    prevdir = os.getcwd()
    os.chdir(newdir)
    try:
        yield
    finally:
        os.chdir(prevdir)


def _run_consensus_jobs(tasks, dry_run=False, cons_json_fn=None):
    """dry_run --- if True, do not actually run the scripts
    cons_json_fn --- if not None, write
        dict{p_id: dict{'script_fn': script_fn, 'script_dir': script_dir}} to it
    """
    fns = list()
    json_data = dict()
    for p_id, (cons_args, fasta_bfn) in tasks.items():
        run_dir = 'preads'
        job_done = "c_%05d_done" % p_id
        script_fn = os.path.join(run_dir, "c_%05d.sh" % p_id)
        cons_args['job_done'] = job_done
        cons_args['script_fn'] = script_fn
        # cons_args <- dict{
        #   'out_file_fn': abspath to preads/out.00001.fasta
        #   'script_fn' : c_00001.sh
        #   'job_done'  : c_00001_done
        #   'raw_reads' : raw_reads
        #   'config'    : config}
        support.run_consensus(**cons_args)
        mkdir(run_dir)
        with cd(run_dir):
            if not dry_run:
                run_cmd('bash %s' % os.path.basename(script_fn),
                        sys.stdout, sys.stderr, shell=False)
        fns.append(os.path.join(run_dir, fasta_bfn))
        # Add script_dir to the manifest so scattered tasks can run in the
        # correct dir, e.g.
        # '/pbi/.../tasks/falcon_ns.tasks.task_falcon0_run_merge_jobs/preads/'
        json_data[p_id] = {
            'script_fn': os.path.basename(script_fn),  # 'c_00001.sh'
            'script_dir': os.path.join(os.getcwd(), run_dir),
        }
    json_fn = "cons_jobs.json" if cons_json_fn is None else cons_json_fn
    with open(json_fn, 'w') as writer:
        writer.write(json.dumps(json_data) + "\n")
    return fns  # *.fasta, e.g. ['preads/out.00001.fasta', 'preads/out.00002.fasta', 'preads/out.00003.fasta']
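# To show why the JSON manifest exists, a hedged sketch of the scatter side
# (an assumption, reusing the 'tasks' dict from the sketch above): generate
# the scripts without running them, then let each scattered worker look up
# its script and working directory from the manifest. subprocess stands in
# for the module's run_cmd purely to keep this sketch self-contained.
import subprocess

# Write scripts and the manifest, but skip execution.
fasta_fns = _run_consensus_jobs(tasks, dry_run=True, cons_json_fn='cons_jobs.json')

with open('cons_jobs.json') as reader:
    jobs = json.load(reader)  # note: the p_id keys come back as strings

for p_id, job in sorted(jobs.items()):
    # Each entry carries the script basename plus the absolute dir to run it
    # in, so a scattered task can execute its job from anywhere.
    subprocess.check_call(['bash', job['script_fn']], cwd=job['script_dir'])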