class ProcessWriteMultisTest(unittest.TestCase):

    def setUp(self):

        if not os.path.exists(temp_dir):
            os.makedirs(temp_dir)

        self.aln_obj = AlignmentList([dna_data_fas[0]], sql_db=sql_db)
        os.makedirs("output")
        self.output_dir = os.path.join("output")

    def tearDown(self):

        shutil.rmtree("output")
        self.aln_obj.con.close()
        shutil.rmtree(temp_dir)

    def test_custom_taxaset_nexus(self):
        """
        Test explicitly for the head of the nexus, which should update the
        ntax parameter when changing the active taxa set
        """

        self.aln_obj.update_taxa_names(["spa", "spb", "spc"])
        self.aln_obj.write_to_file(["nexus"], output_dir=self.output_dir)

        # Get the specific line with the ntax parameter
        header_line = ""
        with open(os.path.join(self.output_dir, "BaseConc1.nex")) as fh:
            while not header_line.startswith("dimensions"):
                header_line = next(fh).strip()

        ref_header = "dimensions ntax=3 nchar=85 ;"

        self.assertEqual(header_line, ref_header)
def setUp(self):

    aln_obj = AlignmentList(dna_data_fas, sql_db=sql_db)
    self.con = aln_obj.con
    self.aln_obj = aln_obj.concatenate(alignment_name="test")
    os.makedirs("output")
    self.output_file = os.path.join("output", "test")
def setUp(self):

    aln_obj = AlignmentList([], sql_db=sql_db)
    self.con = aln_obj.con
    self.aln_obj = Alignment(dna_data_fas[0], sql_cursor=aln_obj.cur)

    if not os.path.exists("output"):
        os.makedirs("output")

    self.output_file = os.path.join("output", "test")
class ProcessWriteSinglesTest(unittest.TestCase):

    def setUp(self):

        if not os.path.exists(temp_dir):
            os.makedirs(temp_dir)

        self.aln_obj = AlignmentList([dna_data_fas[0]], sql_db=sql_db)

        if not os.path.exists("output"):
            os.makedirs("output")

        self.output_file = os.path.join("output", "test")

    def tearDown(self):

        self.aln_obj.con.close()
        os.remove(sql_db)
        shutil.rmtree("output")
        shutil.rmtree(temp_dir)

    def test_write_gphocs(self):

        self.aln_obj.write_to_file(["gphocs"], output_file=self.output_file)

    def test_write_mcmctree(self):

        self.aln_obj.write_to_file(["mcmctree"], output_file=self.output_file)

    def test_write_nexus(self):

        self.aln_obj.write_to_file(["nexus"], output_file=self.output_file)
class SeconaryOpsTest(unittest.TestCase):

    def setUp(self):

        if not os.path.exists(temp_dir):
            os.makedirs(temp_dir)

        self.aln_obj = AlignmentList([], sql_db=sql_db)

    def tearDown(self):

        self.aln_obj.clear_alignments()
        self.aln_obj.con.close()
        shutil.rmtree(temp_dir)

    def test_collapse_single(self):

        self.aln_obj.add_alignment_files([variable_data[0]])

        if not os.path.exists("test_collapse"):
            os.makedirs("test_collapse")

        self.aln_obj.collapse(haplotype_name="Testing",
                              haplotypes_file="teste",
                              dest="test_collapse",
                              use_main_table=True)

        aln = list(self.aln_obj.alignments.values())[0]
        tn = len(list(aln.iter_sequences()))
        self.assertEqual(tn, 1)

        shutil.rmtree("test_collapse")

    def test_collapse_with_variation(self):

        self.aln_obj.add_alignment_files([variable_data[1]])

        if not os.path.exists("test_collapse"):
            os.makedirs("test_collapse")

        self.aln_obj.collapse(haplotype_name="Testing",
                              haplotypes_file="teste",
                              dest="test_collapse",
                              use_main_table=True)

        aln = list(self.aln_obj.alignments.values())[0]
        tn = len(list(aln.iter_sequences()))
        self.assertEqual(tn, 4)

        shutil.rmtree("test_collapse")

    def test_collapse_after_concatenation(self):

        self.aln_obj.add_alignment_files(variable_data)

        if not os.path.exists("test_collapse"):
            os.makedirs("test_collapse")

        self.aln_obj.concatenate()
        self.aln_obj.collapse(haplotype_name="Testing",
                              haplotypes_file="teste",
                              dest="test_collapse",
                              table_out="collapse")

        tn = len(list(self.aln_obj.iter_alignments("collapse")))
        self.assertEqual(tn, 8)

        shutil.rmtree("test_collapse")

    def test_gcoder(self):

        self.aln_obj.add_alignment_files(gcode_data)
        self.aln_obj.code_gaps(use_main_table=True)

        s = []
        for aln in self.aln_obj:
            for seq in aln.iter_sequences():
                s.append(seq)

        res = [
            "aaaaaaaa-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa10000",
            "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa---aaaaaaaaaaa01000",
            "aaaaaaaaaaaa--aaaaaaaaaaaaaaaaaaaaa---aaaaaaaaaaa01100",
            "aaaaaaaaaaaa--aaaaaaaaaaaaaaaaaaaaa---aaaaaaaaaaa01100",
            "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa---aaaaaaaaaaa01000",
            "aaaaaaaaaaaaaaaaaaaaaa----aaaaaaaaa---aaaaaaaaaaa01010",
            "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-aaa---aaaaaaaaaaa01001",
            "aaaaaaaaaaaaaaaaaaaaaa----aaaaaaaaa---aaaaaaaaaaa01010",
            "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa---aaaaaaaaaaa01000",
            "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa---aaaaaaaaaaa01000"
        ]

        self.assertEqual(sorted(s), sorted(res))

    def test_consensus_multi_file(self):

        self.aln_obj.add_alignment_files(dna_data_fas)
        self.aln_obj.consensus("IUPAC", use_main_table=True)

        s = []
        for aln in self.aln_obj:
            s.append(len(aln.taxa_idx))

        self.assertEqual(s, [1] * 7)

    def test_consensus_single_file(self):

        self.aln_obj.add_alignment_files(dna_data_fas)
        self.aln_obj.consensus("IUPAC", single_file=True)

        self.assertEqual(len(self.aln_obj.taxa_names), 7)

    def test_consensus_soft_mask(self):

        self.aln_obj.add_alignment_files(dna_data_fas)
        self.aln_obj.consensus("Soft mask", use_main_table=True)

        s = []
        for aln in self.aln_obj:
            s.append(len(aln.taxa_idx))

        self.assertEqual(s, [1] * 7)

    def test_consensus_remove(self):

        self.aln_obj.add_alignment_files(dna_data_fas)
        self.aln_obj.consensus("Remove", use_main_table=True)

        s = []
        for aln in self.aln_obj:
            s.append(len(aln.taxa_idx))

        self.assertEqual(s, [1] * 7)

    def test_consensus_first_seq(self):

        self.aln_obj.add_alignment_files(dna_data_fas)
        self.aln_obj.consensus("First sequence", use_main_table=True)

        s = []
        for aln in self.aln_obj:
            s.append(len(aln.taxa_idx))

        self.assertEqual(s, [1] * 7)

    def test_consensus_first_seq2(self):

        self.aln_obj.add_alignment_files(dna_data_fas)
        self.aln_obj.consensus("First sequence", use_main_table=True)

        s = 0
        for _ in self.aln_obj.iter_alignments():
            s += 1

        self.assertEqual(s, 7)

    def test_reverse_concatenate(self):

        self.aln_obj.add_alignment_files(concatenated_small_phy)
        self.aln_obj.partitions.read_from_file(concatenated_small_par[0])
        self.aln_obj.reverse_concatenate()

        self.assertEqual(len(self.aln_obj.alignments), 7)

    def test_zorro(self):

        self.aln_obj.add_alignment_files(zorro_data_fas)

        # Generate zorro output
        zorro_data = Zorro(self.aln_obj, "_zorro")
        zorro_data.write_to_file("test")

        # Read zorro and reference files
        zorro_content = open("test_zorro.out").read()
        reference = open(zorro_out).read()

        self.assertEqual(zorro_content, reference)

        os.remove("test_zorro.out")

    def test_zorro_with_dir(self):

        self.aln_obj.add_alignment_files(zorro_data_fas)

        # Generate zorro output
        zorro_data = Zorro(self.aln_obj, "_zorro", "trifusion/tests/data")
        zorro_data.write_to_file("test")

        # Read zorro and reference files
        zorro_content = open("test_zorro.out").read()
        reference = open(zorro_out).read()

        self.assertEqual(zorro_content, reference)

        os.remove("test_zorro.out")

    def test_iter_columns(self):

        self.aln_obj = AlignmentList([variable_data[1]])

        s = 0
        for col, aln_idx in self.aln_obj.iter_columns():
            if len(set(col)) > 1:
                s += 1

        self.assertEqual(s, 3)

    def test_iter_columns_with_active_tx(self):

        self.aln_obj = AlignmentList([variable_data[1]])
        self.aln_obj.update_taxa_names(self.aln_obj.taxa_names[1:])

        s = 0
        for col, aln_idx in self.aln_obj.iter_columns():
            if len(set(col)) > 1:
                s += 1

        self.assertEqual(s, 2)
class ProcessWriteTest(unittest.TestCase):

    def setUp(self):

        if not os.path.exists(temp_dir):
            os.makedirs(temp_dir)

        self.aln_obj = AlignmentList(dna_data_fas, sql_db=sql_db)
        self.aln_obj.concatenate()
        os.makedirs("output")
        self.output_file = os.path.join("output", "test")

    def tearDown(self):

        self.aln_obj.clear_alignments()
        self.aln_obj.con.close()
        shutil.rmtree("output")
        shutil.rmtree(temp_dir)

    def test_write_fasta(self):

        self.aln_obj.write_to_file(["fasta"], output_file=self.output_file)
        self.assertEqual(x.autofinder(self.output_file + ".fas")[0], "fasta")

    def test_write_fasta_interleave(self):

        self.aln_obj.write_to_file(["fasta"], output_file=self.output_file,
                                   interleave=True)

    def test_write_nexus(self):

        self.aln_obj.write_to_file(["nexus"], output_file=self.output_file)
        self.assertEqual(x.autofinder(self.output_file + ".nex")[0], "nexus")

    def test_write_nexus_interleave(self):

        self.aln_obj.write_to_file(["nexus"], output_file=self.output_file,
                                   interleave=True)

    def test_write_mcmctree(self):

        self.aln_obj.write_to_file(["mcmctree"], output_file=self.output_file)
        self.assertEqual(
            x.autofinder(self.output_file + "_mcmctree.phy")[0], "phylip")

    def test_write_phy(self):

        self.aln_obj.write_to_file(["phylip"], output_file=self.output_file)
        self.assertEqual(x.autofinder(self.output_file + ".phy")[0], "phylip")

    def test_write_stockholm(self):

        self.aln_obj.write_to_file(["stockholm"],
                                   output_file=self.output_file)
        self.assertEqual(
            x.autofinder(self.output_file + ".stockholm")[0], "stockholm")

    def test_write_gphocs(self):

        self.aln_obj.write_to_file(["gphocs"], output_file=self.output_file)

    def test_write_ima2(self):

        ima2_params = [ima2_pop_file, "(1,2):3)4:5", "IS", "1"]
        self.aln_obj.write_to_file(["ima2"], output_file=self.output_file,
                                   ima2_params=ima2_params)

    def test_write_interleave(self):

        self.aln_obj.write_to_file(["phylip"], output_file=self.output_file,
                                   interleave=True)

    def test_write_upper_case_phy(self):

        self.aln_obj.write_to_file(["phylip"], output_file=self.output_file,
                                   upper_case=True)

        flag = True
        with open(self.output_file + ".phy") as fh:
            next(fh)
            for line in fh:
                seq = line.strip().split()[1]
                if not seq.isupper():
                    flag = False

        self.assertTrue(flag)

    def test_write_upper_case_fasta(self):

        self.aln_obj.write_to_file(["fasta"], output_file=self.output_file,
                                   upper_case=True)

        flag = True
        with open(self.output_file + ".fas") as fh:
            for line in fh:
                if line.startswith(">") or line.strip() == "":
                    continue
                else:
                    if not line.strip().isupper():
                        flag = False

        self.assertTrue(flag)

    def test_write_gap(self):

        self.aln_obj.write_to_file(
            ["fasta", "phylip", "nexus", "mcmctree", "stockholm", "gphocs"],
            output_file=self.output_file, gap="?")

    def test_write_model_phy(self):

        self.aln_obj.write_to_file(
            ["fasta", "phylip", "nexus", "mcmctree", "stockholm", "gphocs"],
            output_file=self.output_file, model_phylip="LG")

    def test_write_outgoup_list(self):

        self.aln_obj.write_to_file(
            ["fasta", "phylip", "nexus", "mcmctree", "stockholm", "gphocs"],
            output_file=self.output_file, outgroup_list=["spa", "spb"])

    def test_write_use_charset(self):

        self.aln_obj.write_to_file(
            ["fasta", "phylip", "nexus", "mcmctree", "stockholm", "gphocs"],
            output_file=self.output_file, use_charset=False)

    def test_write_partition_file(self):

        self.aln_obj.write_to_file(
            ["fasta", "phylip", "nexus", "mcmctree", "stockholm", "gphocs"],
            output_file=self.output_file, partition_file=False)

    def test_write_output_dir(self):

        self.aln_obj.write_to_file(
            ["fasta", "phylip", "nexus", "mcmctree", "stockholm", "gphocs"],
            output_dir="test2")
        shutil.rmtree("test2")

    def test_write_ldhat(self):

        self.aln_obj.write_to_file(
            ["fasta", "phylip", "nexus", "mcmctree", "stockholm", "gphocs"],
            output_file=self.output_file, ld_hat=True)

    def test_write_snapp(self):

        self.aln_obj.clear_alignments()
        self.aln_obj.con.close()
        os.remove(sql_db)

        self.aln_obj = AlignmentList(variable_data, sql_db=sql_db)
        self.aln_obj.concatenate()
        self.aln_obj.write_to_file(["snapp"], output_file=self.output_file)

        with open(self.output_file + "_snapp.nex") as fh:
            res = sorted(fh.readlines())

        with open(snapp_output[0]) as fh:
            ref = sorted(fh.readlines())

        self.assertEqual(res, ref)

    def test_get_non_contiguous_partitions(self):

        self.aln_obj.partitions.merge_partitions(
            ["BaseConc1.fas", "BaseConc3.fas", "BaseConc7.fas"],
            "non_contiguous")
        self.aln_obj.write_to_file(
            ["mcmctree", "stockholm", "gphocs", "snapp"],
            output_file=self.output_file)

    def test_write_non_contiguous_partitions(self):

        self.aln_obj.partitions.merge_partitions(
            ["BaseConc1.fas", "BaseConc3.fas", "BaseConc7.fas"],
            "non_contiguous")
        self.aln_obj.write_to_file(["phylip", "nexus"],
                                   output_file=self.output_file)
class SeconaryOpsTest(unittest.TestCase):

    def setUp(self):

        self.aln_obj = AlignmentList([], sql_db=sql_db)

    def tearDown(self):

        self.aln_obj.clear_alignments()
        self.aln_obj.con.close()
        os.remove(sql_db)

    def test_collapse_single(self):

        self.aln_obj.add_alignment_files([variable_data[0]])

        if not os.path.exists("test_collapse"):
            os.makedirs("test_collapse")

        self.aln_obj.collapse(haplotype_name="Testing",
                              haplotypes_file="teste",
                              dest="test_collapse",
                              use_main_table=True)

        aln = list(self.aln_obj.alignments.values())[0]
        tn = len(list(aln.iter_sequences(table_suffix="_collapse")))
        self.assertEqual(tn, 1)

        shutil.rmtree("test_collapse")

    def test_collapse_with_variation(self):

        self.aln_obj.add_alignment_files([variable_data[1]])

        if not os.path.exists("test_collapse"):
            os.makedirs("test_collapse")

        self.aln_obj.collapse(haplotype_name="Testing",
                              haplotypes_file="teste",
                              dest="test_collapse",
                              use_main_table=True)

        aln = list(self.aln_obj.alignments.values())[0]
        tn = len(list(aln.iter_sequences(table_suffix="_collapse")))
        self.assertEqual(tn, 4)

        shutil.rmtree("test_collapse")

    def test_collapse_after_concatenation(self):

        self.aln_obj.add_alignment_files(variable_data)

        if not os.path.exists("test_collapse"):
            os.makedirs("test_collapse")

        aln = self.aln_obj.concatenate(alignment_name="test")
        aln.collapse(haplotype_name="Testing", haplotypes_file="teste",
                     dest="test_collapse", table_out="_collapse")

        tn = len(list(aln.iter_sequences(table_suffix="_collapse")))
        self.assertEqual(tn, 7)

        shutil.rmtree("test_collapse")

    def test_gcoder(self):

        self.aln_obj.add_alignment_files(gcode_data)
        self.aln_obj.code_gaps(use_main_table=True)

        s = []
        for aln in self.aln_obj:
            for seq in aln.iter_sequences():
                s.append(seq)

        res = [
            "aaaaaaaa-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa10000",
            "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa---aaaaaaaaaaa01000",
            "aaaaaaaaaaaa--aaaaaaaaaaaaaaaaaaaaa---aaaaaaaaaaa01100",
            "aaaaaaaaaaaa--aaaaaaaaaaaaaaaaaaaaa---aaaaaaaaaaa01100",
            "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa---aaaaaaaaaaa01000",
            "aaaaaaaaaaaaaaaaaaaaaa----aaaaaaaaa---aaaaaaaaaaa01010",
            "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-aaa---aaaaaaaaaaa01001",
            "aaaaaaaaaaaaaaaaaaaaaa----aaaaaaaaa---aaaaaaaaaaa01010",
            "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa---aaaaaaaaaaa01000",
            "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa---aaaaaaaaaaa01000"
        ]

        self.assertEqual(sorted(s), sorted(res))

    def test_consensus_multi_file(self):

        self.aln_obj.add_alignment_files(dna_data_fas)
        self.aln_obj.consensus("IUPAC", use_main_table=True)

        s = []
        for aln in self.aln_obj:
            s.append(len(aln.taxa_list))

        self.assertEqual(s, [1] * 7)

    def test_consensus_single_file(self):

        self.aln_obj.add_alignment_files(dna_data_fas)
        aln = self.aln_obj.consensus("IUPAC", single_file=True)

        self.assertEqual(len(aln.taxa_list), 7)

    def test_consensus_soft_mask(self):

        self.aln_obj.add_alignment_files(dna_data_fas)
        self.aln_obj.consensus("Soft mask", use_main_table=True)

        s = []
        for aln in self.aln_obj:
            s.append(len(aln.taxa_list))

        self.assertEqual(s, [1] * 7)

    def test_consensus_remove(self):

        self.aln_obj.add_alignment_files(dna_data_fas)
        self.aln_obj.consensus("Remove", use_main_table=True)

        s = []
        for aln in self.aln_obj:
            s.append(len(aln.taxa_list))

        self.assertEqual(s, [1] * 7)

    def test_consensus_first_seq(self):

        self.aln_obj.add_alignment_files(dna_data_fas)
        self.aln_obj.consensus("First sequence", use_main_table=True)

        s = []
        for aln in self.aln_obj:
            s.append(len(aln.taxa_list))

        self.assertEqual(s, [1] * 7)

    def test_reverse_concatenate(self):

        self.aln_obj.add_alignment_files(concatenated_small_phy)

        partition_obj = Partitions()
        # In case the partitions file is badly formatted or invalid, the
        # exception will be returned by the read_from_file method.
        partition_obj.read_from_file(concatenated_small_par[0])

        aln = self.aln_obj.concatenate(alignment_name="test")
        aln.set_partitions(partition_obj)

        alns = aln.reverse_concatenate(table_in="concatenation",
                                       db_con=self.aln_obj.con)

        self.assertEqual(len(alns.alignments), 7)

    def test_zorro(self):

        self.aln_obj.add_alignment_files(zorro_data_fas)

        # Generate zorro output
        zorro_data = Zorro(self.aln_obj, "_zorro")
        zorro_data.write_to_file("test")

        # Read zorro and reference files
        zorro_content = open("test_zorro.out").read()
        reference = open(zorro_out).read()

        self.assertEqual(zorro_content, reference)

        os.remove("test_zorro.out")

    def test_zorro_with_dir(self):

        self.aln_obj.add_alignment_files(zorro_data_fas)

        # Generate zorro output
        zorro_data = Zorro(self.aln_obj, "_zorro", "trifusion/tests/data")
        zorro_data.write_to_file("test")

        # Read zorro and reference files
        zorro_content = open("test_zorro.out").read()
        reference = open(zorro_out).read()

        self.assertEqual(zorro_content, reference)

        os.remove("test_zorro.out")