def sanity_check_daligner(scriptDir, testDirName="daligner_test_dir"):
    """Run daligner on gcon_in.fa to verify that daligner is runnable.

    The alignment results are discarded; only successful execution
    matters. A scratch directory ``scriptDir/testDirName`` is created,
    used, and removed afterwards.

    Parameters:
      scriptDir -- directory under which the scratch test dir is made.
      testDirName -- name of the scratch subdirectory.

    Returns True on success; propagates any error raised by the
    daligner tool chain otherwise.
    """
    scriptDir = realpath(scriptDir)
    testDir = op.join(scriptDir, testDirName)

    # os.makedirs creates intermediate directories, so creating testDir
    # also creates scriptDir when needed — one check suffices.
    if not op.exists(testDir):
        os.makedirs(testDir)

    testInFa = op.join(testDir, "gcon_in.fa")
    if op.exists(testInFa):
        os.remove(testInFa)
    shutil.copy(GCON_IN_FA, testInFa)
    # Explicit check instead of `assert`: asserts are stripped when
    # Python runs with -O. Same exception type is preserved for callers.
    if not op.exists(testInFa):
        raise AssertionError("{f} does not exist.".format(f=testInFa))

    obj = DazzIDHandler(testInFa)
    DalignerRunner.make_db(obj.dazz_filename)

    # Run daligner locally (use_sge=False) on the tiny test input.
    runner = DalignerRunner(testInFa, testInFa, is_FL=True,
                            same_strand_only=True, query_converted=True,
                            db_converted=True, query_made=True,
                            db_made=True, use_sge=False, cpus=4,
                            sge_opts=None)
    runner.runHPC(min_match_len=300, output_dir=testDir,
                  sensitive_mode=False)

    shutil.rmtree(testDir)
    logging.info("daligner check passed.")
    return True
def sanity_check_daligner(scriptDir, testDirName="daligner_test_dir"):
    """Smoke-test daligner on gcon_in.fa; results are thrown away.

    Creates a scratch directory under scriptDir, copies the bundled
    test fasta into it, runs the daligner pipeline on it, and removes
    the scratch directory. Returns True if everything ran.
    """
    scriptDir = realpath(scriptDir)
    testDir = op.join(scriptDir, testDirName)

    # Make sure both the parent and the scratch directory exist.
    for needed_dir in (scriptDir, testDir):
        if not op.exists(needed_dir):
            os.makedirs(needed_dir)

    testInFa = op.join(testDir, "gcon_in.fa")
    if op.exists(testInFa):
        os.remove(testInFa)
    shutil.copy(GCON_IN_FA, testInFa)
    assert op.exists(testInFa)

    id_handler = DazzIDHandler(testInFa)
    DalignerRunner.make_db(id_handler.dazz_filename)

    # Self-vs-self alignment, executed locally rather than on SGE.
    daligner = DalignerRunner(testInFa, testInFa, is_FL=True,
                              same_strand_only=True, query_converted=True,
                              db_converted=True, query_made=True,
                              db_made=True, use_sge=False, cpus=4,
                              sge_opts=None)
    daligner.runHPC(min_match_len=300, output_dir=testDir,
                    sensitive_mode=False)

    shutil.rmtree(testDir)
    logging.info("daligner check passed.")
    return True
def _align(self, queryFa, output_dir, ice_opts, sge_opts):
    """Self-align queryFa with daligner.

    Builds a dazz DB for queryFa, runs daligner locally, and returns
    a tuple (DazzIDHandler for queryFa, list of .las.out filenames).
    """
    sensitive, min_len, _max_len = \
        get_daligner_sensitivity_setting(queryFa)

    dazz_obj = DazzIDHandler(queryFa, False)
    DalignerRunner.make_db(dazz_obj.dazz_filename)

    # run this locally
    aligner = DalignerRunner(queryFa, queryFa, is_FL=True,
                             same_strand_only=True, query_converted=True,
                             db_converted=True, query_made=True,
                             db_made=True, use_sge=False, cpus=4,
                             sge_opts=None)
    _las_fns, las_out_fns = aligner.runHPC(
        min_match_len=min_len, output_dir=output_dir,
        sensitive_mode=sensitive)

    return dazz_obj, las_out_fns
def _align(self, queryFa, output_dir, ice_opts, sge_opts):
    """Run a local daligner self-alignment of queryFa.

    Returns (input DazzIDHandler, list of las.out filenames).
    """
    # Sensitivity and match-length thresholds derived from the input.
    use_sensitive, low_len, _high_len = get_daligner_sensitivity_setting(
        queryFa)

    input_obj = DazzIDHandler(queryFa, False)
    DalignerRunner.make_db(input_obj.dazz_filename)

    # run this locally
    runner = DalignerRunner(queryFa, queryFa,
                            is_FL=True,
                            same_strand_only=True,
                            query_converted=True,
                            db_converted=True,
                            query_made=True,
                            db_made=True,
                            use_sge=False,
                            cpus=4,
                            sge_opts=None)
    results = runner.runHPC(min_match_len=low_len,
                            output_dir=output_dir,
                            sensitive_mode=use_sensitive)
    las_filenames, las_out_filenames = results

    return input_obj, las_out_filenames
def run(self):
    """
    First, split non-full-length (nfl) fasta files into smaller
    chunks, assign nfl reads in each splitted fasta file into
    unpolished isoform clusters and then merge all pickles into
    self.nfl_all_pickle_fn.
    Second, bin every 100 clusters, for each bin, call blasr,
    samto5h, loadPulses, cmph5tools to create cmp.h5 files and
    call quiver to polish each isoforms within each bin.
    Finally, pick up good isoform clusters whose QV errors is less
    than a threshold.
    Save all high quality isoforms to hq_isoforms_fa|fq if they are not None
    Save all low quality isoforms to lq_isoforms_fa|fq if they are not None

    NOTE(review): the steps below are strictly order-dependent
    (IceAllPartials -> IceQuiver -> IceQuiverPostprocess); each step
    consumes files produced by the previous one under self.root_dir.
    """
    # Create final.consensus.fa.sa
    #self.add_log("Generating suffix array for {f}".format(
    #    f=self.final_consensus_sa), level=logging.INFO)
    #sa_file = self.get_sa_file()

    # Create input.fasta.fofn from bas_fofn
    self.add_log("Creating fasta fofn from bas/bax.h5 fofn",
                 level=logging.INFO)
    if self.fasta_fofn is None:
        # Default location for the generated fofn inside the nfl dir.
        self.fasta_fofn = op.join(self.nfl_dir, "input.fasta.fofn")
    self.add_log("bas fofn={f}".format(f=self.bas_fofn))
    self.add_log("fasta fofn={f}".format(f=self.fasta_fofn))
    if op.exists(self.fasta_fofn):
        # Reuse a previously generated fofn; conversion is skipped.
        self.add_log("No need to run convert_fofn_to_fasta.")
    else:
        convert_fofn_to_fasta(fofn_filename=self.bas_fofn,
                              out_filename=self.fasta_fofn,
                              fasta_out_dir=self.nfl_dir,
                              cpus=self.sge_opts.blasr_nproc)

    # Split non-full-length reads into smaller fasta files
    # and save files to root_dir/nfl_00.fa, ..., .
    self.add_log("Splitting {nfl} into ".format(nfl=self.nfl_fa) +
                 "smaller files each containing {n} reads.".format(
                     n=self.nfl_reads_per_split),
                 level=logging.INFO)
    self._nfl_splitted_fas = splitFasta(
        input_fasta=self.nfl_fa,
        reads_per_split=self.nfl_reads_per_split,
        out_dir=self.nfl_dir,
        out_prefix="input.split")
    msg = "Splitted files are: " + "\n".join(self._nfl_splitted_fas)
    self.add_log(msg, level=logging.INFO)

    # Generating dazz DB for final.consensus.fasta
    ref_obj = DazzIDHandler(self.final_consensus_fa, False)
    DalignerRunner.make_db(ref_obj.dazz_filename)
    msg = "Dazz DB made for: " + ref_obj.dazz_filename
    self.add_log(msg, level=logging.INFO)

    # Process nfl reads in each splitted fasta.
    self.add_log("Initializing IceAllPartials.", level=logging.INFO)
    #sa_file = self.final_consensus_sa \
    #    if op.exists(self.final_consensus_fa) else None
    self.icep = IceAllPartials(
        root_dir=self.root_dir,
        fasta_filenames=self._nfl_splitted_fas,
        ref_fasta=self.final_consensus_fa,
        out_pickle=self.nfl_all_pickle_fn,
        sge_opts=self.sge_opts,
        sa_file=None,  # since we are switching to daligner, just give it as None now; remove sa_file completely later when daligner is mature (ToDo)
        ccs_fofn=self.ccs_fofn)
    self.add_log("IceAllPartials log: {f}.".format(f=self.icep.log_fn),
                 level=logging.INFO)
    self.icep.run()
    self.add_log("IceAllPartials completed.", level=logging.INFO)

    # Polish each cluster bin with quiver (blasr/loadPulses/cmp.h5).
    self.add_log("Initializing IceQuiver.", level=logging.INFO)
    self.iceq = IceQuiver(root_dir=self.root_dir,
                          bas_fofn=self.bas_fofn,
                          fasta_fofn=self.fasta_fofn,
                          sge_opts=self.sge_opts)
    self.add_log("IceQuiver log: {f}.".format(f=self.iceq.log_fn),
                 level=logging.INFO)
    self.iceq.run()
    self.add_log("IceQuiver finished.", level=logging.INFO)

    # Separate polished isoforms into high/low quality by QV.
    self.add_log("Initializing IceQuiverPostprocess.", level=logging.INFO)
    self.icepq = IceQuiverPostprocess(root_dir=self.root_dir,
                                      use_sge=self.sge_opts.use_sge,
                                      quit_if_not_done=False,
                                      ipq_opts=self.ipq_opts)
    self.add_log("IceQuiverPostprocess log: {f}.".
                 format(f=self.icepq.log_fn), level=logging.INFO)
    self.icepq.run()
    self.add_log("IceQuiverPostprocess finished.", level=logging.INFO)