def test_raxml_inputs():
    """RAxML tree generation on DNA and protein alignments, including quiet mode."""
    # Nucleotide alignment
    nucl = Alb.AlignBuddy(resource("Mnemiopsis_cds.nex"))
    assert phylo_to_hash(Pb.generate_tree(nucl, 'raxml')) == '706ba436f8657ef3aee7875217dd07c0'
    # Peptide alignment (tree built from a copy so the buddy can be reused below)
    pep = Alb.AlignBuddy(resource("Mnemiopsis_pep.nex"))
    assert phylo_to_hash(Pb.generate_tree(Alb.make_copy(pep), 'raxml')) == 'fc35569091eeba49ac4dcec7fc6890bf'
    # The quiet flag must not change the resulting tree
    assert phylo_to_hash(Pb.generate_tree(pep, 'raxml', quiet=True)) == 'fc35569091eeba49ac4dcec7fc6890bf'
def test_fasttree_inputs():
    """FastTree on nucleotide and peptide alignments, with quiet and keep_temp options."""
    temp_dir = MyFuncs.TempDir()
    # Nucleotide
    alignbuddy = Alb.AlignBuddy(resource("Mnemiopsis_cds.nex"))
    tree = Pb.generate_tree(Alb.make_copy(alignbuddy), 'fasttree', '-seed 12345')
    assert phylo_to_hash(tree) == 'd7f505182dd1a1744b45cc326096f70c'
    # A quiet run must produce the identical tree
    tree = Pb.generate_tree(alignbuddy, 'fasttree', '-seed 12345', quiet=True)
    assert phylo_to_hash(tree) == 'd7f505182dd1a1744b45cc326096f70c'
    # Peptide, keeping the temporary working directory around
    alignbuddy = Alb.AlignBuddy(resource("Mnemiopsis_pep.nex"))
    tree = Pb.generate_tree(alignbuddy, 'fasttree', '-seed 12345', keep_temp="%s/new_dir" % temp_dir.path)
    assert phylo_to_hash(tree) == '57eace9bdd2074297cbd2692c1f4cd38'
def test_clustalomega(sb_resources, hf, monkeypatch):
    """Run generate_msa against a mocked clustalomega binary and verify output hashes."""
    mock_tmp_dir = br.TempDir()
    tmp_dir = br.TempDir()
    # Drop a canned clustalo result where the mocked Popen will "produce" it
    shutil.copy("{1}mock_resources{0}test_clustalo{0}result".format(os.path.sep, RES_PATH),
                "%s%s" % (mock_tmp_dir.path, os.path.sep))
    monkeypatch.setattr(Alb, "which", lambda *_: True)
    monkeypatch.setattr(Alb, "Popen", MockPopen)
    monkeypatch.setattr(br, "TempDir", lambda: mock_tmp_dir)

    # basic invocation
    aligned = Alb.generate_msa(sb_resources.get_one("d f"), 'clustalomega')
    assert hf.buddy2hash(aligned) == "f5afdc7c76ab822bdc95230329766aba", aligned.write("temp.del")

    # quiet
    aligned = Alb.generate_msa(sb_resources.get_one("d f"), 'clustalomega', quiet=True)
    assert hf.buddy2hash(aligned) == "f5afdc7c76ab822bdc95230329766aba", aligned.write("temp.del")

    # params: a recognised output format, then an unrecognised one (falls back)
    aligned = Alb.generate_msa(sb_resources.get_one("d f"), 'clustalomega', "--outfmt=nexus", quiet=True)
    assert hf.buddy2hash(aligned) == "23d7c9fa33454ed551a5896e532cf552", aligned.write("temp.del")
    aligned = Alb.generate_msa(sb_resources.get_one("d f"), 'clustalomega', "--outfmt=foobar", quiet=True)
    assert hf.buddy2hash(aligned) == "f5afdc7c76ab822bdc95230329766aba", aligned.write("temp.del")

    # keep_temp: the retained working files should hash to a known value
    monkeypatch.setattr(Sb, "hash_ids", mock_hash_ids)
    Alb.generate_msa(sb_resources.get_one("d f"), 'clustalomega',
                     keep_temp="%s%skeep_files" % (tmp_dir.path, os.path.sep))
    root, dirs, files = next(os.walk("%s%skeep_files" % (tmp_dir.path, os.path.sep)))
    kept_output = ""
    for file in sorted(files):
        with open("%s%s%s" % (root, os.path.sep, file), "r", encoding="utf-8") as ifile:
            kept_output += ifile.read()
    assert hf.string2hash(kept_output) == "bad3a345e769d32672d39ee51df295f5"
def test_generate_alignments_genbank(sb_resources, hf):
    """MAFFT alignment of a protein genbank record set."""
    buddy = sb_resources.get_one("p g")
    aligned = Alb.generate_msa(buddy, "mafft")
    assert hf.buddy2hash(aligned) == "a4ab6b2a2ddda38a4d04abc18c54d18b"
def test_prank_inputs(sb_resources):
    """PRANK accepts a FASTA SeqBuddy and reports fasta as its output format."""
    single_rec = Sb.pull_recs(sb_resources.get_one("d f"), 'α1')
    aligned = Alb.generate_msa(single_rec, 'prank', '-once')
    assert aligned.out_format == 'fasta'
def test_mafft_outputs(sb_resources, hf):
    """MAFFT with --clustalout yields the expected CLUSTAL-format alignment."""
    aligned = Alb.generate_msa(sb_resources.get_one("d f"), 'mafft', '--clustalout')
    assert hf.buddy2hash(aligned) == 'd6046c77e2bdb5683188e5de653affe5'
def test_generate_alignments_genbank2(sb_resources, hf):
    """MAFFT alignment of a protein genbank record set (second expected hash).

    NOTE(review): this function was originally also named
    test_generate_alignments_genbank, which silently shadowed the earlier
    definition of the same name so pytest only ever collected one of the two.
    Renamed so both tests run.
    """
    tester = sb_resources.get_one("p g")
    tester = Alb.generate_msa(tester, "mafft")
    assert hf.buddy2hash(tester) == "ff3d1e474b1b1b76fdda02ebcb225cff"
def test_muscle_multi_param(sb_resources, hf):
    """MUSCLE invoked with two parameters at once (-clw -diags)."""
    aligned = Alb.generate_msa(sb_resources.get_one("d f"), 'muscle', '-clw -diags')
    assert hf.buddy2hash(aligned) == '91542667cef761ccaf39d8cb4e877944'
def test_pagan(sb_resources, hf):
    """PAGAN alignment across output formats; two hashes accepted where PAGAN builds differ."""
    cases = [
        # (params, acceptable hashes)
        (None, ['da1c6bb365e2da8cb4e7fad32d7dafdb', '1219647676b359a5ad0be6d9dda81c73']),          # FASTA
        ('-f nexus', ['f93607e234441a2577fa7d8a387ef7ec', '42bfddd38fa4ed75a99841abf2112e54']),     # NEXUS
        ('-f phylipi', ['09dd492fde598670d7cfee61d4e2eab8', '438e1551b3f1c8526fc8a44eaf2a3dc1']),   # PHYLIPI
        ('-f phylips', ['249c88cb64d41c47388514c65bf8fff1', '6366e50da5a6b33d2d281d6ea13df0b7']),   # PHYLIPS
        ('-f nexus --translate', ['dd140ec4eb895ce75d574498a58aa28a']),                             # Multi-param
    ]
    for params, expected in cases:
        buddy = sb_resources.get_one("d f")
        if params is None:
            aligned = Alb.generate_msa(buddy, 'pagan')
        else:
            aligned = Alb.generate_msa(buddy, 'pagan', params)
        assert hf.buddy2hash(aligned) in expected
    # A few edge cases: these only need to run without raising
    for params in ["-f foo", "-f nexus", "-f phylipi"]:
        subset = Sb.pull_recs(sb_resources.get_one("d f"), "α[2345]")
        Alb.generate_msa(subset, "pagan", params, quiet=True)
def main(in_args):
    """Submit a sequence file to the TOPCONS web service, or retrieve job results.

    If ``in_args.input`` is an existing file, it is read as sequences, cleaned,
    and submitted as a new TOPCONS job. Otherwise ``in_args.input`` is treated
    as a jobid to poll; a finished job's result archive is downloaded into
    ``in_args.outpath``, unpacked, and each sequence's predicted topologies are
    consolidated into a genbank file with TMD annotations.

    :param in_args: argparse namespace with input, jobname, email, outpath
    :return: 0 on success, 1 on any failure
    """
    wsdl_url = "http://v2.topcons.net/pred/api_submitseq/?wsdl"
    fixtop = ""
    if os.path.isfile(in_args.input):
        # ----- Submit a new job ----- #
        try:
            seqbuddy = Sb.SeqBuddy(in_args.input, out_format="fasta")
            Sb.clean_seq(seqbuddy)
            # Sb.hash_ids(seqbuddy)
        except br.GuessError:
            print("Unable to read the provided input file, is it a properly formatted sequence file?")
            return 1
        if len(str(seqbuddy)) >= MAX_FILESIZE:
            print("You input seqfile is too large! Please split the file into chunks less than %d Mb."
                  % MAX_FILESIZE_IN_MB, file=sys.stderr)
            return 1

        # ***** Here's the meat ***** #
        myclient = Client(wsdl_url, cache=None)
        ret_value = myclient.service.submitjob(str(seqbuddy), fixtop, in_args.jobname, in_args.email)
        if len(ret_value) >= 1:
            jobid, result_url, numseq_str, errinfo, warninfo = ret_value[0][:5]
            # The service signals failure with an empty or literal "None" jobid
            if jobid != "None" and jobid != "":
                print("You have successfully submitted your job with %s sequences. jobid = %s"
                      % (numseq_str, jobid))
                if warninfo != "" and warninfo != "None":
                    print("Warning message:\n%s" % warninfo)
            else:
                print("Failed to submit job!")
                if errinfo != "" and errinfo != "None":
                    print("Error message:\n%s" % errinfo)
                if warninfo != "" and warninfo != "None":
                    print("Warning message:\n%s" % warninfo)
        else:
            print("Failed to submit job!")
            return 1
    else:
        # ----- Not a file: treat the input as a jobid and check its status ----- #
        myclient = Client(wsdl_url, cache=None)
        ret_value = myclient.service.checkjob(in_args.input)
        if len(ret_value) >= 1:
            status, result_url, errinfo = ret_value[0][:3]
            if status == "Failed":
                print("Your job with jobid %s is failed!" % in_args.input)
                if errinfo != "" and errinfo != "None":
                    print("Error message:\n%s" % errinfo)
            elif status == "Finished":
                print("Your job with jobid %s is finished!" % in_args.input)
                if not os.path.exists(in_args.outpath):
                    try:
                        os.makedirs(in_args.outpath)
                    except OSError:
                        print("Failed to create the outpath %s" % in_args.outpath)
                        return 1
                outfile = "%s/%s.zip" % (in_args.outpath, in_args.input)
                if not os.path.exists(outfile):
                    print("Retrieving")
                    urllib.request.urlretrieve(result_url, outfile)
                    if os.path.exists(outfile):
                        print("The result file %s has been retrieved for jobid %s" % (outfile, in_args.input))
                    else:
                        print("Failed to retrieve result for jobid %s" % in_args.input)
                with zipfile.ZipFile(outfile) as zf:
                    zf.extractall(in_args.outpath)
                # utf-8 specified explicitly so parsing doesn't depend on locale
                with open("%s/%s/query.result.txt" % (in_args.outpath, in_args.input), "r",
                          encoding="utf-8") as ifile:
                    topcons = ifile.read()
                # Records are delimited by a long '#' rule; drop the header and footer chunks
                topcons = topcons.split("##############################################################################")[2:-1]
                records = []
                for rec in topcons:
                    seq_id = re.search("Sequence name: (.*)", rec).group(1).strip()
                    seq = re.search("Sequence:\n([A-Z]+)", rec).group(1).strip()
                    alignment = ""
                    for algorithm in ["TOPCONS", "OCTOPUS", "Philius", "PolyPhobius", "SCAMPI", "SPOCTOPUS"]:
                        # Not every algorithm reports a topology for every sequence. When the
                        # pattern doesn't match, re.search() returns None and .group() raises
                        # AttributeError (the original code used a bare `except:` here, which
                        # also swallowed KeyboardInterrupt/SystemExit).
                        try:
                            top_file = re.search("%s predicted topology:\n([ioMS]+)" % algorithm,
                                                 rec).group(1).strip()
                            alignment += ">%s\n%s\n\n" % (algorithm, top_file)
                        except AttributeError:
                            print("%s: %s" % (seq_id, algorithm))
                    alignment = Alb.AlignBuddy(alignment)
                    # print(alignment)
                    Alb.consensus_sequence(alignment)
                    cons_seq = Sb.SeqBuddy(">%s\n%s\n" % (seq_id, seq), out_format="genbank")
                    # Annotate each run of [MX] in the consensus as a numbered TMD feature
                    for counter, tmd in enumerate(re.finditer("([MX]+)", str(alignment.records()[0].seq)),
                                                  start=1):
                        Sb.annotate(cons_seq, "TMD%s" % counter, "%s-%s" % (tmd.start(), tmd.end()))
                    records.append(cons_seq.records[0])
                seqbuddy = Sb.SeqBuddy(records, out_format="genbank")
                seqbuddy.write("%s/%s.gb" % (in_args.outpath, in_args.input))
            elif status == "None":
                print("Your job with jobid %s does not exist! Please check you typing!" % in_args.input)
            else:
                print("Your job with jobid %s is not ready, status = %s" % (in_args.input, status))
        else:
            print("Failed to get job!")
            return 1
    return 0
def test_concat_alignments_ui(capsys, alb_resources, hf):
    """--concat_alignments through the command-line UI with a variety of patterns."""
    test_in_args = deepcopy(in_args)

    # Build a two-alignment genbank AlignBuddy from the Cnidaria resource
    seqs = Sb.SeqBuddy("%s/Cnidaria_pep.nexus" % hf.resource_path)
    Sb.pull_recs(seqs, "Ccr|Cla|Hec")
    buddy = Alb.AlignBuddy(str(seqs))
    buddy.alignments.append(buddy.alignments[0])
    buddy.set_format("genbank")

    expected_hashes = [([[]], "d21940f3dad2295dd647f632825d8541"),
                       ([["(.).(.)-Panx(.)"]], "5ac908ebf7918a45664a31da480fda58"),
                       ([["...", "Panx.*"]], "e754350b0397cf54f531421d1e85774f"),
                       ([[3, "Panx.*"]], "e754350b0397cf54f531421d1e85774f"),
                       ([[-9, "Panx.*"]], "9d2886afc640d35618754e05223032a2"),
                       ([[3, 3]], "4e4101f9b5a6d44d524a9783a8c4004b"),
                       ([[3, -3]], "5d9d9ac8fae604be74c436e5f0b5b6db")]
    for concat_args, expected in expected_hashes:
        test_in_args.concat_alignments = concat_args
        Alb.command_line_ui(test_in_args, Alb.make_copy(buddy), skip_exit=True)
        out, err = capsys.readouterr()
        assert hf.string2hash(out) == expected

    # A single alignment is not enough to concatenate
    Alb.command_line_ui(test_in_args, alb_resources.get_one("p o g"), skip_exit=True)
    out, err = capsys.readouterr()
    assert "Please provide at least two alignments." in err

    # A pattern matching nothing is an error
    test_in_args.concat_alignments = [["foo"]]
    Alb.command_line_ui(test_in_args, alb_resources.get_one("m p c"), skip_exit=True)
    out, err = capsys.readouterr()
    assert "No match found for record" in err
def test_rename_ids(key, next_hash, alb_resources, hf):
    """rename() replaces 'Panx' with 'Test' and the result matches the expected hash."""
    alignbuddy = alb_resources.get_one(key)
    Alb.rename(alignbuddy, 'Panx', 'Test', 0)
    # On failure, dump the buddy to error_files/<hash> for inspection. The original
    # interpolated (next_hash, os.path.sep) in the wrong order, producing a junk
    # path like 'error_files<hash>/'.
    assert hf.buddy2hash(alignbuddy) == next_hash, alignbuddy.write("error_files%s%s" % (os.path.sep, next_hash))
def test_translate1(key, next_hash, alb_resources, hf):
    """translate_cds() output matches the expected hash for each resource key."""
    alignbuddy = alb_resources.get_one(key)
    Alb.translate_cds(alignbuddy)
    # On failure, dump the buddy to error_files/<hash> for inspection. The original
    # interpolated (next_hash, os.path.sep) in the wrong order, producing a junk
    # path like 'error_files<hash>/'.
    assert hf.buddy2hash(alignbuddy) == next_hash, alignbuddy.write("error_files%s%s" % (os.path.sep, next_hash))
def test_pull_records(key, next_hash, alb_resources, hf):
    """pull_records() with a regex keeps only the matching records."""
    alignbuddy = alb_resources.get_one(key)
    Alb.pull_records(alignbuddy, "α[1-5]$|β[A-M]")
    # On failure, dump the buddy to error_files/<hash> for inspection. The original
    # interpolated (next_hash, os.path.sep) in the wrong order, producing a junk
    # path like 'error_files<hash>/'.
    assert hf.buddy2hash(alignbuddy) == next_hash, alignbuddy.write("error_files%s%s" % (os.path.sep, next_hash))
def test_cases(key, uc_hash, lc_hash, alb_resources, hf):
    """uppercase() then lowercase() each match their expected hashes."""
    tester = Alb.uppercase(alb_resources.get_one(key))
    # On failure, dump the buddy to error_files/<hash> for inspection. The original
    # interpolated (hash, os.path.sep) in the wrong order, producing a junk path
    # like 'error_files<hash>/'.
    assert hf.buddy2hash(tester) == uc_hash, tester.write("error_files%s%s" % (os.path.sep, uc_hash))
    tester = Alb.lowercase(tester)
    assert hf.buddy2hash(tester) == lc_hash, tester.write("error_files%s%s" % (os.path.sep, lc_hash))
def test_concat_alignments(alb_resources, hf):
    """concat_alignments(): error paths, then pattern-based concatenation hashes."""
    # Fewer than two alignments is rejected outright
    with pytest.raises(AttributeError) as e:
        Alb.concat_alignments(alb_resources.get_one("p o g"), '.*')
    assert "Please provide at least two alignments." in str(e)

    tester = alb_resources.get_one("o p g")
    tester.alignments.append(alb_resources.get_one("o p g").alignments[0])
    with pytest.raises(ValueError) as e:
        Alb.concat_alignments(tester, 'foo')
    assert "No match found for record" in str(e)
    with pytest.raises(ValueError) as e:
        Alb.concat_alignments(tester, 'Panx')
    assert "Replicate matches" in str(e)

    # Build a two-alignment buddy for the positive cases
    tester = Sb.SeqBuddy("%sCnidaria_pep.nexus" % hf.resource_path)
    Sb.pull_recs(tester, "Ccr|Cla|Hec")
    tester = Alb.AlignBuddy(str(tester))
    tester.alignments.append(tester.alignments[0])
    assert hf.buddy2hash(Alb.concat_alignments(Alb.make_copy(tester))) == '32a507107b7dcd044ea7760c8812441c'

    tester.set_format("gb")
    tester2 = Alb.concat_alignments(Alb.make_copy(tester), "(.).(.)-Panx(.)")
    assert hf.buddy2hash(tester2) == 'cd2b6594b22c431aea67fa45899f933a'
    # NOTE(review): a verbatim duplicate of the preceding three lines was removed here
    # (identical set_format/concat/assert — accidental copy-paste).

    tester.set_format("gb")
    tester2 = Alb.concat_alignments(Alb.make_copy(tester), "...", "Panx.*")
    assert hf.buddy2hash(tester2) == 'e49b26f695c910a93f93d70563fd9dd9'

    tester.set_format("gb")
    tester2 = Alb.concat_alignments(Alb.make_copy(tester), "...", "(P)an(x)(.)")
    assert hf.buddy2hash(tester2) == '3abfdf2217050ac2170c0de27352a8c6'

    # Alignments of unequal record counts still concatenate
    shorten = Alb.delete_records(Alb.make_copy(tester), "Ccr")
    tester.alignments[1] = shorten.alignments[1]
    assert hf.buddy2hash(Alb.concat_alignments(Alb.make_copy(tester))) == '685f24ee1fc88860dd9465035040c91e'
def test_phylip_sequential_read(alb_odd_resources, hf, capsys):
    """br.phylip_sequential_read(): relaxed and strict parsing, plus malformed input errors."""
    # Relaxed (default) parsing — the original leaked these file handles by calling
    # open(...).read() inline; use context managers instead.
    with open("{0}Mnemiopsis_cds.physr".format(RESOURCE_PATH), "r", encoding="utf-8") as ifile:
        records = br.phylip_sequential_read(ifile.read())
    buddy = Alb.AlignBuddy(records, out_format="phylipsr")
    assert hf.buddy2hash(buddy) == "c5fb6a5ce437afa1a4004e4f8780ad68", buddy.write("temp.del")

    # Strict parsing
    with open("{0}Mnemiopsis_cds.physs".format(RESOURCE_PATH), "r", encoding="utf-8") as ifile:
        records = br.phylip_sequential_read(ifile.read(), relaxed=False)
    buddy = Alb.AlignBuddy(records, out_format="phylipss")
    assert hf.buddy2hash(buddy) == "4c0c1c0c63298786e6fb3db1385af4d5"

    # Too few columns in a record
    with open(alb_odd_resources['dna']['single']['phylipss_cols'], "r", encoding="utf-8") as ifile:
        records = ifile.read()
    with pytest.raises(br.PhylipError) as err:
        br.phylip_sequential_read(records)
    assert "Malformed Phylip --> Less sequence found than expected" in str(err)

    # Too few records
    with open(alb_odd_resources['dna']['single']['phylipss_recs'], "r", encoding="utf-8") as ifile:
        records = ifile.read()
    with pytest.raises(br.PhylipError) as err:
        br.phylip_sequential_read(records)
    assert "Malformed Phylip --> 9 sequences expected, 4 found." in str(err)
    capsys.readouterr()

    # A sequence longer than the declared column count
    records = """ 3 15
Mle-Panxα4 M--VIE---------A
Mle-Panxα8 M--VLE---------A
Mle-Panxα6 M--LLE----------A
"""
    with pytest.raises(br.PhylipError) as err:
        br.phylip_sequential_read(records)
    assert "Malformed Phylip --> Sequence Mle-Panxα4 has 16 columns, 15 expected." in str(err)

    # Duplicate record id (relaxed mode)
    records = """ 3 15
Mle-Panxα4 M--VIE--------A
Mle-Panxα8 M--VLE--------A
Mle-Panxα8 M--LLE--------A
"""
    with pytest.raises(br.PhylipError) as err:
        br.phylip_sequential_read(records)
    assert "Malformed Phylip --> Repeat ID Mle-Panxα8." in str(err)

    # Duplicate id after strict 10-character truncation
    records = """ 3 15
Mle-Panxα4M--VIE--------A
Mle-Panxα8M--VLE--------A
Mle-Panxα8M--LLE--------A
"""
    with pytest.raises(br.PhylipError) as err:
        br.phylip_sequential_read(records, relaxed=False)
    assert "Malformed Phylip --> Repeat id 'Mle-Panxα8' after strict truncation. " in str(err)
def test_lowercase_ui(capsys, alb_resources, hf):
    """--lowercase through the command-line UI."""
    ui_args = deepcopy(in_args)
    ui_args.lowercase = True
    Alb.command_line_ui(ui_args, alb_resources.get_one("m p s"), skip_exit=True)
    out, err = capsys.readouterr()
    assert hf.string2hash(out) == "00661f7afb419c6bb8c9ac654af7c976"
def test_muscle_outputs(sb_resources, hf):
    """MUSCLE with -clw produces the expected alignment hash."""
    aligned = Alb.generate_msa(sb_resources.get_one("d f"), 'muscle', '-clw')
    assert hf.buddy2hash(aligned) == '91542667cef761ccaf39d8cb4e877944'
def test_map_features2alignment_ui(capsys, alb_resources, sb_resources, hf):
    """--mapfeat2align through the command-line UI."""
    ui_args = deepcopy(in_args)
    ui_args.mapfeat2align = [sb_resources.get_one("d g", "paths")]
    Alb.command_line_ui(ui_args, alb_resources.get_one("o d n"), skip_exit=True)
    out, err = capsys.readouterr()
    assert hf.string2hash(out) == "9fece109249f4d787c13e6fb2742843d"
def test_clustalw_multi_param(sb_resources, hf):
    """ClustalW invoked with two parameters at once."""
    aligned = Alb.generate_msa(sb_resources.get_one("d f"), clustalw_bin, '-output=phylip -noweights')
    assert hf.buddy2hash(aligned) == 'ae9126eb8c482a82d4060d175803c478'
def test_uppercase_ui(capsys, alb_resources, hf):
    """--uppercase through the command-line UI."""
    ui_args = deepcopy(in_args)
    ui_args.uppercase = True
    Alb.command_line_ui(ui_args, alb_resources.get_one("m p s"), skip_exit=True)
    out, err = capsys.readouterr()
    assert hf.string2hash(out) == "6f3f234d796520c521cb85c66a3e239a"
def test_mafft_inputs(sb_resources, hf):
    """MAFFT on a FASTA DNA SeqBuddy produces the expected alignment hash."""
    aligned = Alb.generate_msa(sb_resources.get_one("d f"), 'mafft')
    assert hf.buddy2hash(aligned) == 'f94e0fd591dad83bd94201f0af038904'
def test_argparse_init(capsys, monkeypatch, alb_resources, hf, alb_odd_resources):
    """argparse_init(): a happy path, the fatal input/format errors, and flag parsing."""
    def run_argv(*argv):
        # Patch sys.argv and run argparse_init, returning its (in_args, alignbuddy) pair
        monkeypatch.setattr(sys, 'argv', ['AlignBuddy.py'] + list(argv))
        return Alb.argparse_init()

    def expect_exit(expected_err, *argv):
        # Fatal inputs must SystemExit with the given message on stderr
        monkeypatch.setattr(sys, 'argv', ['AlignBuddy.py'] + list(argv))
        with pytest.raises(SystemExit):
            Alb.argparse_init()
        out, err = capsys.readouterr()
        assert expected_err in err

    temp_in_args, alignbuddy = run_argv(alb_resources.get_one("o p py", "paths"),
                                        "-con", "-o", "stockholm")
    assert hf.buddy2hash(alignbuddy) == "5d9a03d9e1b4bf72d991257d3a696306"

    expect_exit("Format type 'foo' is not recognized/supported",
                alb_resources.get_one("o p py", "paths"), "-con", "-o", "foo")
    expect_exit("GuessError: Could not determine format from _input file",
                alb_odd_resources["dna"]["single"]["fasta"], "-con")
    expect_exit("ValueError: First line should have two integers",
                alb_odd_resources["dna"]["single"]["fasta"], "-con", "-f", "phylip")
    expect_exit("PhylipError: Malformed Phylip --> 9 sequences expected, 8 found.",
                alb_odd_resources["dna"]["single"]["phylipss_recs"], "-con", "-f", "phylipss")
    expect_exit("TypeError: Format type 'foo' is not recognized/supported",
                alb_resources.get_one("o p py", "paths"), "-con", "-f", "foo")

    # --generate_alignment defers alignment, so no AlignBuddy is returned
    temp_in_args, alignbuddy = run_argv(alb_resources.get_one("o p f", "paths"), "--quiet",
                                        "--generate_alignment", "mafft", "--reorder")
    assert alignbuddy == []

    # Unknown flags after the tool name are collected as tool parameters
    temp_in_args, alignbuddy = run_argv(alb_resources.get_one("o p f", "paths"),
                                        "--generate_alignment", "mafft", "--op", "5", "--quiet")
    assert temp_in_args.generate_alignment == [['mafft', ' --op', '5']]

    temp_in_args, alignbuddy = run_argv(alb_resources.get_one("o p f", "paths"),
                                        "--generate_alignment", "mafft", "-q")
    assert temp_in_args.generate_alignment == [['mafft']]
def test_mafft_multi_param(sb_resources, hf):
    """MAFFT invoked with two parameters at once (--clustalout --noscore)."""
    aligned = Alb.generate_msa(sb_resources.get_one("d f"), 'mafft', '--clustalout --noscore')
    assert hf.buddy2hash(aligned) == 'd6046c77e2bdb5683188e5de653affe5'
def test_prank_outputs3(sb_resources):
    """PRANK with '-f=phylips' reports 'phylipsr' as its output format."""
    single_rec = Sb.pull_recs(sb_resources.get_one("d f"), 'α1')
    aligned = Alb.generate_msa(single_rec, 'prank', params='-f=phylips -once')
    assert aligned.out_format == 'phylipsr'
def test_generate_alignments_edges2(tool, params, sb_resources):
    """Smoke test: each tool/params combination runs without raising."""
    subset = Sb.pull_recs(sb_resources.get_one("d f"), "α[2345]")
    Alb.generate_msa(subset, tool, params, quiet=True)
def test_extract_regions(key, next_hash, alb_resources, hf):
    """extract_regions() with range '0:50' matches the expected hash."""
    extracted = Alb.extract_regions(alb_resources.get_one(key), "0:50")
    # On mismatch, print the buddy so the failing alignment is visible
    assert hf.buddy2hash(extracted) == next_hash, print(extracted)
def test_prank_outputs1(sb_resources):
    """PRANK with '-f=nexus' reports 'nexus' as its output format."""
    single_rec = Sb.pull_recs(sb_resources.get_one("d f"), 'α1')
    aligned = Alb.generate_msa(single_rec, 'prank', '-f=nexus -once')
    assert aligned.out_format == 'nexus'
def test_faux_alignment(key, next_hash, sb_resources, hf):
    """faux_alignment() with a fixed random seed is deterministic per resource key."""
    faux = Alb.faux_alignment(sb_resources.get_one(key), r_seed=12345)
    assert hf.buddy2hash(faux) == next_hash
def test_muscle_inputs(sb_resources, hf):
    """MUSCLE on a FASTA DNA SeqBuddy produces the expected alignment hash."""
    aligned = Alb.generate_msa(sb_resources.get_one("d f"), 'muscle')
    assert hf.buddy2hash(aligned) == '5ec18f3e0c9f5cf96944a1abb130232f'
def test_enforce_triplets(key, next_hash, alb_resources, hf):
    """enforce_triplets() output matches the expected hash for each resource key."""
    result = Alb.enforce_triplets(alb_resources.get_one(key))
    assert hf.buddy2hash(result) == next_hash