def main():
    """Entry point: log in, then for each sample submit per-file
    pre-processing jobs (bam or fastq flavor) followed by one alignment job
    that waits on all of them."""
    args = parse_args()
    synapse_login()
    nda_login()

    for (sample, filetype), entries in sample_list(args.infile).items():
        print(sample)

        f_run_info = sample + "/run_info"
        run_info(f_run_info)
        run_info_append(
            f_run_info,
            "\n#SYNAPSE\nPARENTID={}".format(args.parentid))

        # filetype is fixed per sample, so pick the submitter once.
        submit = submit_pre_jobs_bam if filetype == "bam" else submit_pre_jobs_fastq
        job_ids = [submit(sample, fname, loc) for fname, loc in entries]

        # Alignment depends on every pre-processing job for this sample.
        submit_aln_jobs(sample, ",".join(job_ids))
        print()
def main():
    """Entry point: log in, then for each sample run the pre-processing job,
    fan out a GATK + filter chain for each ploidy in 2..10, and submit one
    post-processing job that waits on all of them."""
    args = parse_args()
    synapse_login()
    nda_login()

    for sample, sdata in sample_list(args.infile).items():
        print(sample)
        run_info(sample + "/run_info")

        pre_jid = submit_pre_jobs(sample, sdata)

        # One GATK -> filter chain per ploidy; collect the tail job IDs.
        chain_tails = []
        for ploidy in range(2, 11):
            gatk_jid = submit_gatk_jobs(sample, ploidy, pre_jid)
            chain_tails.append(submit_filter_jobs(sample, ploidy, gatk_jid))

        submit_post_jobs(sample, ",".join(chain_tails))
        print()
def main():
    """Entry point: log in, skip samples with jobs already in the queue,
    record the run options into each sample's run_info file, then submit
    pre-processing (bam or fastq flavor) followed by alignment jobs."""
    args = parse_args()
    synapse_login()
    nda_login()

    # Ring of recent download-job IDs used elsewhere to throttle
    # concurrent downloads; seeded with None placeholders.
    global down_jid_queue
    down_jid_queue = deque([None] * args.con_down_limit)

    for (sample, filetype), sdata in sample_list(args.sample_list).items():
        print("- Sample: " + sample)

        # Guard: don't double-submit a sample that is already queued.
        run_jid_path = sample + "/run_jid"
        if q.num_run_jid_in_queue(run_jid_path) > 0:
            print("There are submitted jobs for this sample.")
            print("Skip to submit jobs.\n")
            continue
        q.set_run_jid(run_jid_path, new=True)

        info_path = sample + "/run_info"
        run_info(info_path)
        for entry in ("\n#RUN_OPTIONS",
                      "UPLOAD={}".format(args.upload),
                      "RUN_CNVNATOR={}".format(args.run_cnvnator),
                      "RUN_MUTECT_SINGLE={}".format(args.run_mutect_single)):
            run_info_append(info_path, entry)
        if args.run_gatk_hc:
            ploidy = " ".join(str(i) for i in args.run_gatk_hc)
            run_info_append(info_path, "RUN_GATK_HC=True\nPLOIDY=\"{}\"".format(ploidy))
        else:
            run_info_append(info_path, "RUN_GATK_HC={}".format(args.run_gatk_hc))

        # filetype selects the pre-processing flavor; alignment waits on it.
        submit = submit_pre_jobs_bam if filetype == "bam" else submit_pre_jobs_fastq
        submit_aln_jobs(sample, submit(sample, sdata))
        print()
def main():
    """Entry point: log in, skip samples already queued, record run options,
    then chain jobs per sample: throttled downloads -> (bam only) bam2cram ->
    unmapped-read extraction -> variant calling -> cram upload."""
    args = parse_args()
    synapse_login()
    nda_login()

    # Ring of recent download-job IDs enforcing the concurrent-download
    # limit; seeded with None placeholders (no dependency for the first
    # con_down_limit downloads).
    global down_jid_queue
    down_jid_queue = deque([None] * args.con_down_limit)

    for (sample, filetype), sdata in sample_list(args.sample_list).items():
        print("- Sample: " + sample)

        # Guard: don't double-submit a sample that is already queued.
        run_jid_path = sample + "/run_jid"
        if q.num_run_jid_in_queue(run_jid_path) > 0:
            print("There are submitted jobs for this sample.")
            print("Skip to submit jobs.\n")
            continue
        q.set_run_jid(run_jid_path, new=True)

        info_path = sample + "/run_info"
        run_info(info_path)
        run_info_append(info_path, "\n#RUN_OPTIONS")
        run_info_append(info_path, "UPLOAD={}".format(args.upload))
        run_info_append(info_path, "RUN_CNVNATOR={}".format(args.run_cnvnator))
        run_info_append(info_path, "RUN_MUTECT_SINGLE={}".format(args.run_mutect_single))
        if args.run_gatk_hc:
            ploidy = " ".join(str(i) for i in args.run_gatk_hc)
            run_info_append(info_path, "RUN_GATK_HC=True\nPLOIDY=\"{}\"".format(ploidy))
        else:
            run_info_append(info_path, "RUN_GATK_HC={}".format(args.run_gatk_hc))

        # This variant of the pipeline accepts only bam/cram input.
        if filetype == "fastq":
            raise Exception("The input filetype should be bam or cram.")

        # One download job per source file; each new job depends on the job
        # popped from the front of the ring, capping concurrent downloads.
        global down_jid
        download_jids = []
        for fname, loc in sdata:
            down_jid = down_jid_queue.popleft()
            jid = q.submit(
                opt(sample, down_jid),
                "{job_home}/pre_1.download.sh {sample} {fname} {loc}".format(
                    job_home=job_home, sample=sample, fname=fname, loc=loc))
            download_jids.append(jid)
            down_jid_queue.append(jid)
        jid = ",".join(download_jids)

        # NOTE(review): the original formatting was ambiguous here; assuming
        # only the bam2cram conversion is bam-specific and the remaining
        # steps run for cram input as well — confirm against history.
        if filetype == "bam":
            jid = q.submit(
                opt(sample, jid),
                "{job_home}/pre_2.bam2cram.sh {sample}".format(
                    job_home=job_home, sample=sample))
        jid = q.submit(
            opt(sample, jid),
            "{job_home}/pre_2b.unmapped_reads.sh {sample}".format(
                job_home=job_home, sample=sample))
        jid = q.submit(
            opt(sample, jid),
            "{job_home}/pre_3.run_variant_calling.sh {sample}".format(
                job_home=job_home, sample=sample))
        q.submit(
            opt(sample, jid),
            "{job_home}/pre_4.upload_cram.sh {sample}".format(
                job_home=job_home, sample=sample))
        print()