def test_binary_nary_tree():
    factory = GraphFactory(31, 1e5, 4)
    trees = {}
    trees[2] = SBT(factory)
    trees[5] = SBT(factory, d=5)
    trees[10] = SBT(factory, d=10)

    n_leaves = 0
    for f in utils.SIG_FILES:
        sig = next(signature.load_signatures(utils.get_test_data(f)))
        leaf = SigLeaf(os.path.basename(f), sig)
        for tree in trees.values():
            tree.add_node(leaf)
        to_search = leaf
        n_leaves += 1

    assert all(len(t.leaves()) == n_leaves for t in trees.values())

    results = {}
    print('*' * 60)
    print("{}:".format(to_search.metadata))
    for d, tree in trees.items():
        results[d] = {
            str(s) for s in tree.find(search_minhashes, to_search.data, 0.1)
        }
    print(*results[2], sep='\n')

    assert results[2] == results[5]
    assert results[5] == results[10]


def test_sbt_fsstorage():
    factory = GraphFactory(31, 1e5, 4)
    with utils.TempDirectory() as location:
        tree = SBT(factory)

        for f in utils.SIG_FILES:
            sig = next(signature.load_signatures(utils.get_test_data(f)))
            leaf = SigLeaf(os.path.basename(f), sig)
            tree.add_node(leaf)
            to_search = leaf

        print('*' * 60)
        print("{}:".format(to_search.metadata))
        old_result = {
            str(s) for s in tree.find(search_minhashes, to_search.data, 0.1)
        }
        print(*old_result, sep='\n')

        with FSStorage(os.path.join(location, '.fstree')) as storage:
            tree.save(os.path.join(location, 'tree'), storage=storage)

        tree = SBT.load(os.path.join(location, 'tree'),
                        leaf_loader=SigLeaf.load)
        print('*' * 60)
        print("{}:".format(to_search.metadata))
        new_result = {
            str(s) for s in tree.find(search_minhashes, to_search.data, 0.1)
        }
        print(*new_result, sep='\n')

        assert old_result == new_result

        assert os.path.exists(os.path.join(location, tree.storage.path))
        assert os.path.exists(os.path.join(location, '.fstree'))


def test_tree_save_load(n_children):
    factory = GraphFactory(31, 1e5, 4)
    tree = SBT(factory, d=n_children)

    for f in utils.SIG_FILES:
        sig = next(signature.load_signatures(utils.get_test_data(f)))
        leaf = SigLeaf(os.path.basename(f), sig)
        tree.add_node(leaf)
        to_search = leaf

    print('*' * 60)
    print("{}:".format(to_search.metadata))
    old_result = {
        str(s) for s in tree.find(search_minhashes, to_search.data, 0.1)
    }
    print(*old_result, sep='\n')

    with utils.TempDirectory() as location:
        tree.save(os.path.join(location, 'demo'))
        tree = SBT.load(os.path.join(location, 'demo'),
                        leaf_loader=SigLeaf.load)

        print('*' * 60)
        print("{}:".format(to_search.metadata))
        new_result = {
            str(s) for s in tree.find(search_minhashes, to_search.data, 0.1)
        }
        print(*new_result, sep='\n')

        assert old_result == new_result


def sbt_index(args):
    from sourmash_lib.sbt import SBT, GraphFactory
    from sourmash_lib.sbtmh import search_minhashes, SigLeaf

    parser = argparse.ArgumentParser()
    parser.add_argument('sbt_name', help='name to save SBT into')
    parser.add_argument('signatures', nargs='+',
                        help='signatures to load into SBT')
    parser.add_argument('-k', '--ksize', type=int, default=None)
    parser.add_argument('--traverse-directory', action='store_true')
    parser.add_argument('-x', '--bf-size', type=float, default=1e5)

    sourmash_args.add_moltype_args(parser)

    args = parser.parse_args(args)
    moltype = sourmash_args.calculate_moltype(args)

    factory = GraphFactory(1, args.bf_size, 4)
    tree = SBT(factory)

    if args.traverse_directory:
        inp_files = list(sourmash_args.traverse_find_sigs(args.signatures))
    else:
        inp_files = list(args.signatures)

    notify('loading {} files into SBT', len(inp_files))

    n = 0
    ksizes = set()
    moltypes = set()
    for f in inp_files:
        siglist = sig.load_signatures(f, select_ksize=args.ksize,
                                      select_moltype=moltype)

        # load all matching signatures in this file
        for ss in siglist:
            ksizes.add(ss.estimator.ksize)
            moltypes.add(sourmash_args.get_moltype(ss))

            leaf = SigLeaf(ss.md5sum(), ss)
            tree.add_node(leaf)
            n += 1

        # check to make sure we aren't loading incompatible signatures
        if len(ksizes) > 1 or len(moltypes) > 1:
            error('multiple k-mer sizes or molecule types present; fail.')
            error('specify --dna/--protein and --ksize as necessary')
            error('ksizes: {}; moltypes: {}',
                  ", ".join(map(str, ksizes)), ", ".join(moltypes))
            sys.exit(-1)

    # did we load any!?
    if n == 0:
        error('no signatures found to load into tree!? failing.')
        sys.exit(-1)

    notify('loaded {} sigs; saving SBT under "{}"', n, args.sbt_name)
    tree.save(args.sbt_name)


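# A minimal sketch of how an SBT saved by the sbt_index command above might be
# loaded and queried programmatically, following the same
# SBT.load / tree.find(search_minhashes, ...) pattern used by the tests in this
# section. The function name, the query-path argument, and the 0.1 threshold
# are illustrative assumptions, not part of the sourmash API.
from sourmash_lib import signature
from sourmash_lib.sbt import SBT
from sourmash_lib.sbtmh import search_minhashes, SigLeaf


def search_saved_sbt(sbt_name, query_sig_path, threshold=0.1):
    # load the saved tree, reconstructing leaves as signature leaves
    tree = SBT.load(sbt_name, leaf_loader=SigLeaf.load)
    # load the query signature and collect leaves above the similarity threshold
    query = next(signature.load_signatures(query_sig_path))
    return [str(leaf) for leaf in tree.find(search_minhashes, query, threshold)]

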
def test_simple(n_children):
    factory = GraphFactory(5, 100, 3)
    root = SBT(factory, d=n_children)

    leaf1 = Leaf("a", factory())
    leaf1.data.count('AAAAA')
    leaf1.data.count('AAAAT')
    leaf1.data.count('AAAAC')

    leaf2 = Leaf("b", factory())
    leaf2.data.count('AAAAA')
    leaf2.data.count('AAAAT')
    leaf2.data.count('AAAAG')

    leaf3 = Leaf("c", factory())
    leaf3.data.count('AAAAA')
    leaf3.data.count('AAAAT')
    leaf3.data.count('CAAAA')

    leaf4 = Leaf("d", factory())
    leaf4.data.count('AAAAA')
    leaf4.data.count('CAAAA')
    leaf4.data.count('GAAAA')

    leaf5 = Leaf("e", factory())
    leaf5.data.count('AAAAA')
    leaf5.data.count('AAAAT')
    leaf5.data.count('GAAAA')

    root.add_node(leaf1)
    root.add_node(leaf2)
    root.add_node(leaf3)
    root.add_node(leaf4)
    root.add_node(leaf5)

    def search_kmer(obj, seq):
        return obj.data.get(seq)

    leaves = [leaf1, leaf2, leaf3, leaf4, leaf5]
    kmers = ["AAAAA", "AAAAT", "AAAAG", "CAAAA", "GAAAA"]

    def search_kmer_in_list(kmer):
        x = []
        for l in leaves:
            if l.data.get(kmer):
                x.append(l)
        return set(x)

    for kmer in kmers:
        assert set(root.find(search_kmer, kmer)) == search_kmer_in_list(kmer)

    print('-----')
    print([x.metadata for x in root.find(search_kmer, "AAAAA")])
    print([x.metadata for x in root.find(search_kmer, "AAAAT")])
    print([x.metadata for x in root.find(search_kmer, "AAAAG")])
    print([x.metadata for x in root.find(search_kmer, "CAAAA")])
    print([x.metadata for x in root.find(search_kmer, "GAAAA")])


def test_simple_index(n_children):
    factory = GraphFactory(5, 100, 3)
    root = SBT(factory, d=n_children)

    leaf1 = Leaf("a", factory())
    leaf1.data.count("AAAAA")
    leaf1.data.count("AAAAT")
    leaf1.data.count("AAAAC")

    leaf2 = Leaf("b", factory())
    leaf2.data.count("AAAAA")
    leaf2.data.count("AAAAT")
    leaf2.data.count("AAAAG")

    leaf3 = Leaf("c", factory())
    leaf3.data.count("AAAAA")
    leaf3.data.count("AAAAT")
    leaf3.data.count("CAAAA")

    leaf4 = Leaf("d", factory())
    leaf4.data.count("AAAAA")
    leaf4.data.count("CAAAA")
    leaf4.data.count("GAAAA")

    leaf5 = Leaf("e", factory())
    leaf5.data.count("AAAAA")
    leaf5.data.count("AAAAT")
    leaf5.data.count("GAAAA")

    root.add_node(leaf1)
    root.add_node(leaf2)
    root.add_node(leaf3)
    root.add_node(leaf4)
    root.add_node(leaf5)

    def search_kmer(obj, seq):
        return obj.data.get(seq)

    kmers = ["AAAAA", "AAAAT", "AAAAG", "CAAAA", "GAAAA"]

    linear = LinearIndex()
    linear.insert(leaf1)
    linear.insert(leaf2)
    linear.insert(leaf3)
    linear.insert(leaf4)
    linear.insert(leaf5)

    for kmer in kmers:
        assert set(root.find(search_kmer, kmer)) == \
            set(linear.find(search_kmer, kmer))

    print("-----")
    print([x.metadata for x in root.find(search_kmer, "AAAAA")])
    print([x.metadata for x in root.find(search_kmer, "AAAAT")])
    print([x.metadata for x in root.find(search_kmer, "AAAAG")])
    print([x.metadata for x in root.find(search_kmer, "CAAAA")])
    print([x.metadata for x in root.find(search_kmer, "GAAAA")])


def test_longer_search(n_children):
    ksize = 5
    factory = GraphFactory(ksize, 100, 3)
    root = SBT(factory, d=n_children)

    leaf1 = Leaf("a", factory())
    leaf1.data.count('AAAAA')
    leaf1.data.count('AAAAT')
    leaf1.data.count('AAAAC')

    leaf2 = Leaf("b", factory())
    leaf2.data.count('AAAAA')
    leaf2.data.count('AAAAT')
    leaf2.data.count('AAAAG')

    leaf3 = Leaf("c", factory())
    leaf3.data.count('AAAAA')
    leaf3.data.count('AAAAT')
    leaf3.data.count('CAAAA')

    leaf4 = Leaf("d", factory())
    leaf4.data.count('AAAAA')
    leaf4.data.count('CAAAA')
    leaf4.data.count('GAAAA')

    leaf5 = Leaf("e", factory())
    leaf5.data.count('AAAAA')
    leaf5.data.count('AAAAT')
    leaf5.data.count('GAAAA')

    root.add_node(leaf1)
    root.add_node(leaf2)
    root.add_node(leaf3)
    root.add_node(leaf4)
    root.add_node(leaf5)

    def kmers(k, seq):
        for start in range(len(seq) - k + 1):
            yield seq[start:start + k]

    def search_transcript(node, seq, threshold):
        presence = [node.data.get(kmer) for kmer in kmers(ksize, seq)]
        if sum(presence) >= int(threshold * (len(seq) - ksize + 1)):
            return 1
        return 0

    try1 = [x.metadata for x in root.find(search_transcript, "AAAAT", 1.0)]
    assert set(try1) == set(['a', 'b', 'c', 'e']), try1  # no 'd'

    try2 = [x.metadata for x in root.find(search_transcript, "GAAAAAT", 0.6)]
    assert set(try2) == set(['a', 'b', 'c', 'd', 'e'])

    try3 = [x.metadata for x in root.find(search_transcript, "GAAAA", 1.0)]
    assert set(try3) == set(['d', 'e']), try3


def sbt_index(client, db, cell, query, ksize, nsketch, key, file):
    '''Create a sequence Bloom tree from a cell/database cursor.

    1. select seqs for tree
    2. assign common id (field derivative.minhash.sbt.ids)
    3. minhash seqs, name == UUID, md5? (think about SBT reuse)
    4. query a different collection/metagenome against this

    --index {raw, minhash}

    input: all of cell or cursor

    \b
    $ zoo sbt_index --db ref --cell ref --ksize 16 --nsketch 1000 reference
    Initialize SBT.
    Compute minhash signatures for selected documents.
    k-mer size: 16, sketch size: 1000
    \ 9158 Elapsed Time: 0:01:45
    Save SBT.
    Done.

    \b
    $ sourmash sbt_search --ksize 16 reference survey.fa.sig
    # running sourmash subcommand: sbt_search
    loaded query: survey.fa... (k=16, DNA)
    0.11 0ef85591-d464-4953-915f-f673907b7e8e (Zika reference genome)

    TODO: add query
    TODO: --key arg not working?
    '''
    c = MongoClient(client)[db][cell]

    print('Initialize SBT.')
    # init SBT
    factory = GraphFactory(ksize=ksize, starting_size=1e5, n_tables=4)  # 4 .. n_tables?
    tree = SBT(factory, d=2)  # d .. see "n-ary" in notebook

    print('Compute minhash signatures for selected documents.')
    print('k-mer size: {}, sketch size: {}'.format(ksize, nsketch))

    bar = ProgressBar(max_value=UnknownLength)
    counter = 0
    for d in c.find():
        counter += 1
        e = Estimators(ksize=ksize, n=nsketch)
        e.add_sequence(d['sequence'], force=True)
        s = SourmashSignature(email='', estimator=e, name=deep_get(d, key))
        leaf = SigLeaf(metadata=deep_get(d, key), data=s)
        tree.add_node(node=leaf)
        bar.update(counter)

    print('\nSave SBT.')
    tree.save(file)
    print('Done.')


def sbt_index(self, args):
    from sourmash_lib.sbt import SBT, GraphFactory
    from sourmash_lib.sbtmh import search_minhashes, SigLeaf

    parser = argparse.ArgumentParser()
    parser.add_argument('sbt_name')
    parser.add_argument('signatures', nargs='+')
    parser.add_argument('-k', '--ksize', type=int, default=DEFAULT_K)
    parser.add_argument('--traverse-directory', action='store_true')
    parser.add_argument('-x', '--bf-size', type=float, default=1e5)

    sourmash_args.add_moltype_args(parser)

    args = parser.parse_args(args)

    if args.protein:
        if args.dna is True:
            raise Exception('cannot specify both --dna and --protein!')
        args.dna = False
        moltype = 'protein'
    else:
        args.dna = True
        moltype = 'dna'

    factory = GraphFactory(1, args.bf_size, 4)
    tree = SBT(factory)

    inp_files = list(args.signatures)

    if args.traverse_directory:
        inp_files = []
        for dirname in args.signatures:
            for root, dirs, files in os.walk(dirname):
                for name in files:
                    if name.endswith('.sig'):
                        fullname = os.path.join(root, name)
                        inp_files.append(fullname)

    print('loading {} files into SBT'.format(len(inp_files)))

    n = 0
    for f in inp_files:
        s = sig.load_signatures(f, select_ksize=args.ksize,
                                select_moltype=moltype)

        for ss in s:
            leaf = SigLeaf(ss.md5sum(), ss)
            tree.add_node(leaf)
            n += 1

    print('loaded {} sigs; saving SBT under "{}".'.format(n, args.sbt_name))
    tree.save(args.sbt_name)


def test_sbt_combine(n_children):
    factory = GraphFactory(31, 1e5, 4)
    tree = SBT(factory, d=n_children)
    tree_1 = SBT(factory, d=n_children)
    tree_2 = SBT(factory, d=n_children)

    n_leaves = 0
    for f in utils.SIG_FILES:
        sig = next(signature.load_signatures(utils.get_test_data(f)))
        leaf = SigLeaf(os.path.basename(f), sig)
        tree.add_node(leaf)
        if n_leaves < 4:
            tree_1.add_node(leaf)
        else:
            tree_2.add_node(leaf)
        n_leaves += 1

    tree_1.combine(tree_2)

    t1_leaves = {str(l) for l in tree_1.leaves()}
    t_leaves = {str(l) for l in tree.leaves()}

    assert len(t1_leaves) == n_leaves
    assert len(t_leaves) == len(t1_leaves)
    assert t1_leaves == t_leaves

    to_search = next(signature.load_signatures(
        utils.get_test_data(utils.SIG_FILES[0])))
    t1_result = {
        str(s) for s in tree_1.find(search_minhashes, to_search, 0.1)
    }
    tree_result = {
        str(s) for s in tree.find(search_minhashes, to_search, 0.1)
    }
    assert t1_result == tree_result

    # TODO: save and load both trees

    # check if adding a new node will use the next empty position
    next_empty = 0
    for n, d in tree_1.nodes.items():
        if d is None:
            next_empty = n
            break
    if not next_empty:
        next_empty = n + 1

    tree_1.add_node(leaf)
    assert tree_1.max_node == next_empty


def test_search_minhashes():
    factory = GraphFactory(31, 1e5, 4)
    tree = SBT(factory)

    for f in utils.SIG_FILES:
        sig = next(signature.load_signatures(utils.get_test_data(f)))
        leaf = SigLeaf(os.path.basename(f), sig)
        tree.add_node(leaf)

    to_search = next(iter(tree.leaves()))

    # this fails if 'search_minhashes' computes containment and not similarity
    results = tree.find(search_minhashes, to_search.data, 0.08)
    for leaf in results:
        assert to_search.data.similarity(leaf.data) >= 0.08

    print(results)


def test_save_sparseness(n_children):
    factory = GraphFactory(31, 1e5, 4)
    tree = SBT(factory, d=n_children)

    for f in utils.SIG_FILES:
        sig = next(signature.load_signatures(utils.get_test_data(f)))
        leaf = SigLeaf(os.path.basename(f), sig)
        tree.add_node(leaf)
        to_search = leaf

    print('*' * 60)
    print("{}:".format(to_search.metadata))
    old_result = {
        str(s) for s in tree.find(search_minhashes, to_search.data, 0.1)
    }
    print(*old_result, sep='\n')

    with utils.TempDirectory() as location:
        tree.save(os.path.join(location, 'demo'), sparseness=1.0)
        tree_loaded = SBT.load(os.path.join(location, 'demo'),
                               leaf_loader=SigLeaf.load)
        assert all(not isinstance(n, Node)
                   for n in tree_loaded.nodes.values())

        print('*' * 60)
        print("{}:".format(to_search.metadata))
        new_result = {
            str(s) for s in tree_loaded.find(search_minhashes,
                                             to_search.data, 0.1)
        }
        print(*new_result, sep='\n')

        assert old_result == new_result

        for pos, node in list(tree_loaded.nodes.items()):
            # Every parent of a node must be an internal node (and not a
            # leaf), except for node 0 (the root), whose parent is None.
            if pos != 0:
                assert isinstance(tree_loaded.parent(pos).node, Node)

            # Leaf nodes can't have children
            if isinstance(node, Leaf):
                assert all(c.node is None
                           for c in tree_loaded.children(pos))


def test_sbt_ipfsstorage():
    ipfsapi = pytest.importorskip('ipfsapi')

    factory = GraphFactory(31, 1e5, 4)
    with utils.TempDirectory() as location:
        tree = SBT(factory)

        for f in utils.SIG_FILES:
            sig = next(signature.load_signatures(utils.get_test_data(f)))
            leaf = SigLeaf(os.path.basename(f), sig)
            tree.add_node(leaf)
            to_search = leaf

        print('*' * 60)
        print("{}:".format(to_search.metadata))
        old_result = {
            str(s) for s in tree.find(search_minhashes, to_search.data, 0.1)
        }
        print(*old_result, sep='\n')

        try:
            with IPFSStorage() as storage:
                tree.save(os.path.join(location, 'tree'), storage=storage)
        except ipfsapi.exceptions.ConnectionError:
            pytest.xfail("ipfs not installed/functioning probably")

        with IPFSStorage() as storage:
            tree = SBT.load(os.path.join(location, 'tree'),
                            leaf_loader=SigLeaf.load,
                            storage=storage)

            print('*' * 60)
            print("{}:".format(to_search.metadata))
            new_result = {
                str(s) for s in tree.find(search_minhashes,
                                          to_search.data, 0.1)
            }
            print(*new_result, sep='\n')

            assert old_result == new_result


                 force=True)
db.ref.insert_one(r)

with open('ref.json', 'w+') as outjson:
    outjson.write(dumps(db.ref.find(), indent=4))

from sourmash_lib import Estimators
from sourmash_lib.sbt import SBT, GraphFactory
from sourmash_lib.sbtmh import SigLeaf, search_minhashes
from sourmash_lib.signature import SourmashSignature

KSIZE = 16
N = 1000

# init SBT
factory = GraphFactory(ksize=KSIZE, starting_size=1e5, n_tables=4)  # 4 .. nt?
tree = SBT(factory, d=2)  # d .. see "n-ary" in notebook

bar = progressbar.ProgressBar(max_value=progressbar.UnknownLength)
cursor = db.ref.find()
c = 0
for i in cursor:
    key = deep_get(i, 'metadata.alt_id.gb')
    seq = i['sequence']  # db.ref.find_one()['sequence'] .. 'ACTG...'

    e = Estimators(ksize=KSIZE, n=N)
    e.add_sequence(seq, force=True)  # e.get_hashes()

    s = SourmashSignature(email='', estimator=e, name=key)
    leaf = SigLeaf(metadata=key, data=s)
    tree.add_node(node=leaf)


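# A minimal sketch of how the tree built above might be saved and then queried
# against one of the stored documents, reusing names already in scope in this
# fragment (db, deep_get, KSIZE, N) and the search_minhashes import above.
# The 'ref_sbt' output name and the 0.1 threshold are illustrative assumptions.
tree.save('ref_sbt')

query_doc = db.ref.find_one()
qe = Estimators(ksize=KSIZE, n=N)
qe.add_sequence(query_doc['sequence'], force=True)
query_sig = SourmashSignature(email='', estimator=qe,
                              name=deep_get(query_doc, 'metadata.alt_id.gb'))

# search the in-memory tree; matching leaves carry their signature in .data
matches = [leaf.metadata for leaf in tree.find(search_minhashes, query_sig, 0.1)]
print(matches)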