def test_compact_index_with_different_types(self, tmpdir):
    db = self._db(os.path.join(str(tmpdir), "db"))
    ind_id = UniqueHashIndex(db.path, "id")
    db.set_indexes([ind_id])
    db.create()
    l = []
    for i in range(10):
        c = dict(i=i)
        c.update(db.insert(c))
        l.append(c)
    # with name
    for i in range(10):
        curr = l[i]
        c = db.get("id", curr["_id"])
        c["update"] = True
        c.update(db.update(c))
    db.compact_index(ind_id.name)
    for j in range(10):
        curr = l[j]
        c = db.get("id", curr["_id"])
        assert c["_id"] == curr["_id"]
        assert c["i"] == j
    # with instance
    for i in range(10):
        curr = l[i]
        c = db.get("id", curr["_id"])
        c["update"] = True
        c.update(db.update(c))
    ind_id = db.indexes_names[ind_id.name]
    db.compact_index(ind_id)
    for j in range(10):
        curr = l[j]
        c = db.get("id", curr["_id"])
        assert c["_id"] == curr["_id"]
        assert c["i"] == j
    # with different instance
    for i in range(10):
        curr = l[i]
        c = db.get("id", curr["_id"])
        c["update"] = True
        c.update(db.update(c))
    with pytest.raises(DatabaseException):
        db.compact_index(UniqueHashIndex(db.path, "id"))
    for j in range(10):
        curr = l[j]
        c = db.get("id", curr["_id"])
        assert c["_id"] == curr["_id"]
        assert c["i"] == j
    db.close()
def test_add_duplicate_index_throws_exception(self, tmpdir):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes([UniqueHashIndex(db.path, "id")])
    db.create()
    new_index = UniqueHashIndex(db.path, "another_index")
    db.add_index(new_index)
    new_index = UniqueHashIndex(db.path, "another_index")
    with pytest.raises(IndexException):
        db.add_index(new_index)
def test_get_after_delete(self, tmpdir):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes(
        [UniqueHashIndex(db.path, "id"), CustomHashIndex(db.path, "custom")]
    )
    db.create()
    for x in range(100):
        doc = dict(test=6)
        doc.update(db.insert(doc))
        if doc["test"] > 5:
            self.counter["r"] += 1
        else:
            self.counter["l"] += 1
    counted_bef = db.count(db.all, "custom")
    elem = db.all("custom", with_doc=True, limit=1)
    doc = next(elem)["doc"]
    assert db.delete(doc) == True
    counted_aft = db.count(db.all, "custom")
    assert counted_bef - 1 == counted_aft
    from_ind = db.get("custom", 1, with_doc=True)
    assert from_ind["doc"]["_id"] != doc["_id"]
    alls = db.all("custom", with_doc=True, limit=90)
    for curr in alls:
        assert db.delete(curr["doc"]) == True
    from_ind = db.get("custom", 1, with_doc=True)
    assert from_ind["doc"] != {}
def test_add_new_index_to_existing_db(self, tmpdir):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes([UniqueHashIndex(db.path, "id")])
    db.create()
    new_index = UniqueHashIndex(db.path, "unique_hash_index")
    db.add_index(new_index)
    new_index = Md5Index(db.path, "md5_index")
    db.add_index(new_index)
    new_index = WithAIndex(db.path, "with_a_index")
    db.add_index(new_index)
    assert len(db.indexes) == 4
def test_all_same_keys(self, tmpdir, inserts):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes([UniqueHashIndex(db.path, "id"), WithAIndex(db.path, "with_a")])
    db.create()
    l = 0
    r = 0
    z = 0
    data = []
    for i in range(inserts):
        a = random.randint(0, 10)
        if a > 5:
            r += 1
        elif a == 0:
            z += 1
        else:
            l += 1
        c = dict(a=a)
        db.insert(c)
        data.append(c)
    assert l + r + z == db.count(db.all, "id")
    assert l + r == db.count(db.all, "with_a")
def test_custom_index_2(self, tmpdir):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes([UniqueHashIndex(db.path, "id"), WithAIndex(db.path, "with_a")])
    db.create()
    all_ins = []
    for i in range(10):
        curr = dict(something="other")
        if i % 2 == 0:
            curr["a"] = str(i)
        all_ins.append(curr)
        db.insert(curr)
    l_0 = len(list(db.all("id")))
    l_1 = len(list(db.all("with_a")))
    assert l_1 != l_0
    all_a = list(db.all("with_a", with_doc=True))
    curr_a = all_a[0]["doc"]
    del curr_a["a"]
    db.update(curr_a)
    l_2 = len(list(db.all("with_a")))
    assert l_2 + 1 == l_1
    curr_a = all_a[-1]["doc"]
    db.delete(curr_a)
    l_3 = len(list(db.all("with_a")))
    assert l_3 + 2 == l_1
def test_compact2(self, tmpdir):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes([UniqueHashIndex(db.path, "id")])
    db.create()
    l = []
    for i in range(10):
        if i % 2 == 0:
            c = dict(i=i, even=True)
        else:
            c = dict(i=i)
        c.update(db.insert(c))
        l.append(c)
    for i, curr in enumerate(db.all("id")):
        if i % 2:
            db.delete(curr)
    db.compact()
    db.compact()
    db.compact()
    assert len(list(db.all("id", with_doc=True))) == 5
    for curr in db.all("id"):
        db.delete(curr)
    assert len(list(db.all("id", with_doc=True))) == 0
def test_compact(self, tmpdir):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes([UniqueHashIndex(db.path, "id")])
    db.create()
    l = []
    for i in range(10):
        c = dict(i=i)
        db.insert(c)
        l.append(c)
    for i in range(10):
        curr = l[i]
        c = db.get("id", curr["_id"])
        c["update"] = True
        c.update(db.update(c))
    db.compact()
    for j in range(10):
        curr = l[j]
        c = db.get("id", curr["_id"])
        assert c["_id"] == curr["_id"]
        assert c["i"] == j
    db.close()
def test_adv1(self, tmpdir, inserts):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes([UniqueHashIndex(db.path, "id")])
    db.create()
    l = []
    for i in range(inserts):
        c = dict(i=i)
        db.insert(c)
        l.append(c)
    for j in range(inserts):
        curr = l[j]
        c = db.get("id", curr["_id"])
        assert c["_id"] == curr["_id"]
        assert c["i"] == j
    for i in range(inserts):
        curr = l[i]
        c = db.get("id", curr["_id"])
        c["update"] = True
        db.update(c)
    for j in range(inserts):
        curr = l[j]
        c = db.get("id", curr["_id"])
        assert c["update"] == True
    for j in range(inserts):
        curr = l[j]
        c = db.get("id", curr["_id"])
        assert db.delete(c) == True
    for j in range(inserts):
        with pytest.raises(RecordDeleted):
            db.get("id", l[j]["_id"])
    db.close()
def test_removing_index_from_db_2(self, tmpdir):
    """indexes are added from strings"""
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes([UniqueHashIndex(db.path, "id")])
    db.create()
    path_indexes = os.path.join(db.path, "_indexes")

    def file_exists_in_indexes_dir(name):
        return [f for f in os.listdir(path_indexes) if name in f]

    file_names = (
        "tests/index_files/03md5_index.py",
        "tests/index_files/04withA_index.py",
        "tests/index_files/05custom_hash_index.py",
    )
    indexes_names = [db.add_index(open(f).read()) for f in file_names]
    for index_name in indexes_names:
        assert db.get_index_details(index_name)
        assert file_exists_in_indexes_dir(index_name)
        db.destroy_index(index_name)
        assert not file_exists_in_indexes_dir(index_name)
        with pytest.raises(IndexException):
            db.get_index_details(index_name)
def test_update_custom_unique(self, tmpdir, inserts):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes(
        [UniqueHashIndex(db.path, "id"), CustomHashIndex(db.path, "custom")]
    )
    db.create()
    ins = []
    for x in range(inserts):
        doc = dict(test=1)
        db.insert(doc)
        ins.append(doc)
        self.counter["ins"] += 1
    assert len(ins) == db.count(db.all, "id")
    assert len(ins) == db.count(db.all, "custom")
    assert len(ins) == db.count(db.get_many, "custom", key=0, limit=inserts + 1)
    assert 0 == db.count(db.get_many, "custom", key=1, limit=inserts + 1)
    sample = random.sample(ins, inserts // 10)
    for curr in sample:
        curr["test"] = 10
        db.update(curr)
        self.counter["upd"] += 1
    assert self.counter["ins"] == db.count(db.all, "id")
    assert self.counter["ins"] == db.count(db.all, "custom")
    assert self.counter["upd"] == db.count(
        db.get_many, "custom", key=1, limit=inserts + 1
    )
    assert self.counter["ins"] - self.counter["upd"] == db.count(
        db.get_many, "custom", key=0, limit=inserts + 1
    )
def test_delete_with_id_and_rev_only(self, tmpdir, inserts):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes([UniqueHashIndex(db.path, "id")])
    db.create()
    ins = []
    for x in range(inserts):
        doc = dict(x=x)
        db.insert(doc)
        ins.append(doc)
        self.counter["ins"] += 1
    for i in range(inserts // 10):
        curr = ins.pop(random.randint(0, len(ins) - 1))
        d = {"_id": curr["_id"], "_rev": curr["_rev"]}
        db.delete(d)
    assert len(ins) == db.count(db.all, "id")
    for x in range(inserts):
        doc = dict(x=x)
        db.insert(doc)
        ins.append(doc)
        self.counter["ins"] += 1
    for i in range(inserts // 10):
        curr = ins.pop(random.randint(0, len(ins) - 1))
        db.delete(curr)
    assert len(ins) == db.count(db.all, "id")
def test_wrong_id(self, tmpdir):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes([UniqueHashIndex(db.path, "id")])
    db.create()
    with pytest.raises(IndexPreconditionsException):
        db.insert(dict(_id="1", a=1))
    with pytest.raises(IndexPreconditionsException):
        db.insert(dict(_id=1, a=1))
def test_destroy(self, tmpdir):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes([UniqueHashIndex(db.path, "id")])
    db.create()
    for i in range(5):
        db.insert(dict(i=i))
    db.destroy()
    db = self._db(os.path.join(str(tmpdir), "db"))
    with pytest.raises(DatabasePathException):
        db.open()
def test_similar(self, tmpdir):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes([UniqueHashIndex(db.path, "id"), Md5Index(db.path, "md5")])
    db.create()
    a = dict(a="pigmej")
    db.insert(a)
    db.get("md5", "pigmej")
    with pytest.raises(RecordNotFound):
        db.get("md5", "pigme")
def test_double_insert(self, tmpdir):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes([UniqueHashIndex(db.path, "id")])
    db.create()
    a_id = "54bee5c4628648b5a742379a1de89b2d"
    a1 = dict(a=1, _id=a_id)
    db.insert(a1)
    a2 = dict(a=2, _id=a_id)
    with pytest.raises(IndexException):
        db.insert(a2)
def test_insert_with_id(self, tmpdir):
    db = self._db(os.path.join(str(tmpdir), "db"))
    # db.set_indexes([UniqueHashIndex(db.path, 'id')])
    # db.initialize()
    db.add_index(UniqueHashIndex(db.path, "id"), False)
    db.create()
    doc = dict(a=1, _id=random_hex_32())
    ins = db.insert(doc)
    assert ins["_id"] == doc["_id"]
    db.close()
def test_create_with_path(self, tmpdir):
    p = os.path.join(str(tmpdir), "db")
    db = self._db(None)
    db.create(p, with_id_index=False)
    ind = UniqueHashIndex(p, "id")
    db.add_index(ind)
    for x in range(10):
        db.insert(dict(x=x))
    assert db.count(db.all, "id") == 10
def test_auto_add_id_index_without_initialize(self, tmpdir):
    p = os.path.join(str(tmpdir), "db")
    db = self._db(None)
    db.create(p)
    # the 'id' index is added automatically on create, so adding
    # another one with the same name must fail
    ind = UniqueHashIndex(p, "id")
    with pytest.raises(IndexException):
        db.add_index(ind)
    for x in range(10):
        db.insert(dict(x=x))
    assert db.count(db.all, "id") == 10
def test_create_index_duplicate_from_string(self, tmpdir):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes([UniqueHashIndex(db.path, "id")])
    db.create()
    file_name = "tests/index_files/04withA_index.py"
    db.add_index(open(file_name).read())
    l = len(db.indexes)
    with pytest.raises(IndexException):
        db.add_index(open(file_name).read())
    assert l == len(db.indexes)
def test_create_index_duplicate_from_object(self, tmpdir):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes([UniqueHashIndex(db.path, "id")])
    db.create()
    hash_index = HashIndex(db.path, "my_index")
    db.add_index(hash_index)
    l = len(db.indexes)
    hash_index = HashIndex(db.path, "my_index")
    with pytest.raises(IndexException):
        db.add_index(hash_index)
    assert l == len(db.indexes)
def test_offset_in_functions(self, tmpdir, inserts):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes(
        [UniqueHashIndex(db.path, "id"), CustomHashIndex(db.path, "custom")]
    )
    db.create()
    offset = inserts // 10 or 1
    real_inserts = inserts if inserts < 1000 else 1000
    for x in range(real_inserts):
        db.insert(dict(test=x))
    assert real_inserts - offset == db.count(db.all, "id", offset=offset)
    assert real_inserts - offset == db.count(db.all, "custom", offset=offset)
    assert 1 == db.count(db.get_many, "custom", 1, limit=1, offset=offset)
    db.close()
def test_update_index(self, tmpdir):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes([UniqueHashIndex(db.path, "id")])
    db.create()
    hash_index = HashIndex(db.path, "my_index")
    db.add_index(hash_index)
    assert db.get_index_details(hash_index.name)
    db.destroy_index(hash_index.name)
    with pytest.raises(IndexNotFoundException):
        db.get_index_details(hash_index.name)
    new_index = Md5Index(db.path, "my_index")
    db.add_index(new_index)
    assert db.get_index_details(new_index.name)
def test_all(self, tmpdir, inserts):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes([UniqueHashIndex(db.path, "id")])
    db.create()
    l = []
    for i in range(inserts):
        c = dict(i=i)
        db.insert(c)
        l.append(c)
    assert db.count(db.all, "id") == inserts
    to_delete = random.randint(0, inserts - 1)
    db.delete(l[to_delete])
    assert db.count(db.all, "id") == inserts - 1
def test_open_with_path(self, tmpdir):
    p = os.path.join(str(tmpdir), "db")
    db = self._db(p)
    ind = UniqueHashIndex(p, "id")
    db.set_indexes([ind])
    db.create(with_id_index=False)
    for x in range(10):
        db.insert(dict(x=x))
    db.close()
    db = self._db(p)
    db.open(p)
    assert db.count(db.all, "id") == 10
def test_open_close(self, tmpdir):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes([UniqueHashIndex(db.path, "id")])
    db.create()
    l = []
    for i in range(5):
        c = dict(i=i)
        db.insert(c)
        l.append(c)
    db.close()
    db.open()
    db.close()
    db2 = self._db(os.path.join(str(tmpdir), "db"))
    # db2.set_indexes([UniqueHashIndex(db.path, 'id')])
    db2.open()
    for j in range(5):
        assert l[j] == db2.get("id", l[j]["_id"])
    db2.close()
def test_adding_index_creates_dot_py_file(self, tmpdir):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes([UniqueHashIndex(db.path, "id")])
    db.create()
    path_indexes = os.path.join(db.path, "_indexes")
    before = set(os.listdir(path_indexes))
    new_index = Md5Index(db.path, "md5_index")
    db.add_index(new_index)
    after = set(os.listdir(path_indexes))
    added_file = tuple(after - before)[0]
    assert len(after) == len(before) + 1
    assert new_index.name + ".py" in added_file
def test_create_path_delayed2(self, tmpdir):
    p = os.path.join(str(tmpdir), "db")
    db = self._db(None)
    db.initialize(p)
    db.create(with_id_index=False)
    ind = UniqueHashIndex(p, "id")
    db.add_index(ind)
    for x in range(10):
        db.insert(dict(x=x))
    db.close()
    db = self._db(p)
    db.open(p)
    assert db.count(db.all, "id") == 10
def test_update_custom_same_key_new_value(self, tmpdir, inserts):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes(
        [UniqueHashIndex(db.path, "id"), CustomHashIndex(db.path, "custom")]
    )
    db.create()
    inserted = []
    for x in range(inserts):
        inserted.append(db.insert(dict(test=x)))
        inserted[-1]["test"] = x
    for el in inserted[::20]:
        for i in range(4):
            curr = db.get("id", el["_id"], with_storage=True)
            assert el["test"] == curr["test"]
            el["test"] += random.randint(1, 3)
            db.update(el)
    assert len(inserted) == db.count(db.all, "custom")
    db.close()
def test_add_new_index_from_string(self, tmpdir):
    db = self._db(os.path.join(str(tmpdir), "db"))
    db.set_indexes([UniqueHashIndex(db.path, "id")])
    db.create()
    file_names = (
        "tests/index_files/03md5_index.py",
        "tests/index_files/04withA_index.py",
        "tests/index_files/05custom_hash_index.py",
    )
    indexes_names = [db.add_index(open(f).read()) for f in file_names]
    assert len(db.indexes) == len(file_names) + 1  # 'id' + from files
    for y in range(100):
        db.insert(dict(a="blah", test=y, name=str(y), y=y))
    for index_name in indexes_names:
        assert db.count(db.all, index_name) == 100