def test_utils_montydump(tmp_monty_repo):
    """Dump an existing collection to a BSON file and verify its raw bytes.

    NOTE(review): this module appears to define ``test_utils_montydump``
    twice with different signatures; the later definition shadows this one
    under pytest collection -- confirm which version is intended.
    """
    database = "dump_db_BSON"
    collection = "dump_col_BSON"
    with open_repo(tmp_monty_repo):
        montydump(database, collection, BSON_DUMP)
        with open(BSON_DUMP, "rb") as dump:
            raw = dump.read()
        # The dump must round-trip to the known-good base64 fixture.
        assert base64.b64encode(raw) == BINARY
        # Remove the artifact so the repo stays clean, matching the
        # cleanup done by the sibling restore/import/export tests.
        os.remove(BSON_DUMP)
def test_utils_montyexport(tmp_monty_repo):
    """Export a collection to a JSON-lines file and compare each document.

    NOTE(review): a later ``test_utils_montyexport`` definition appears to
    shadow this one under pytest collection -- confirm which is intended.
    """
    database = "dump_db_JSON"
    collection = "dump_col_JSON"
    with open_repo(tmp_monty_repo):
        montyexport(database, collection, JSON_DUMP)
        with open(JSON_DUMP, "r") as dump:
            data = dump.read().strip()
        exported_lines = data.split("\n")
        expected_lines = SERIALIZED.split("\n")
        # zip() silently truncates when the counts differ, which would let
        # a partial export pass -- check the line counts explicitly first.
        assert len(exported_lines) == len(expected_lines)
        for d, s in zip(exported_lines, expected_lines):
            # Compare parsed documents so formatting/key-order differences
            # in the serialized JSON do not cause false failures.
            assert json_util.loads(d) == json_util.loads(s)
        os.remove(JSON_DUMP)
def test_utils_montyrestore(monty_client, tmp_monty_utils_repo):
    """Restore a collection from a BSON dump and verify every document."""
    database = "dump_db_BSON"
    collection = "dump_col_BSON"
    with open_repo(tmp_monty_utils_repo):
        # Materialize the known-good dump fixture on disk.
        with open(BSON_DUMP, "wb") as dump:
            dump.write(base64.b64decode(BINARY))
        montyrestore(database, collection, BSON_DUMP)
        col = monty_client[database][collection]
        restored = col.find(sort=[("_id", 1)])
        for index, document in enumerate(restored):
            # Round-trip each expected document through BSON so both sides
            # share the same decoded representation.
            expected = BSON.encode(DOCUMENTS[index]).decode()
            assert document == expected
        os.remove(BSON_DUMP)
def test_utils_montyimport(monty_client, tmp_monty_utils_repo):
    """Import a collection from a JSON dump and verify every document."""
    database = "dump_db_JSON"
    collection = "dump_col_JSON"
    with open_repo(tmp_monty_utils_repo):
        # Materialize the serialized JSON fixture on disk.
        with open(JSON_DUMP, "w") as dump:
            dump.write(SERIALIZED)
        montyimport(database, collection, JSON_DUMP)
        col = monty_client[database][collection]
        imported = col.find(sort=[("_id", 1)])
        for index, document in enumerate(imported):
            # Round-trip each expected document through BSON so both sides
            # share the same decoded representation.
            expected = BSON.encode(DOCUMENTS[index]).decode()
            assert document == expected
        os.remove(JSON_DUMP)
def test_utils_montydump(monty_client, tmp_monty_utils_repo):
    """Dump a restored collection back to BSON and verify the raw bytes.

    Skipped on the LMDB engine because its natural document order differs
    from MongoDB's, which would make the byte-for-byte comparison fail.
    """
    database = "dump_db_BSON"
    collection = "dump_col_BSON"
    if monty_client.server_info()["storageEngine"] == "lightning":
        pytest.skip("LMDB's document natural order is lexicographic, not easy "
                    "to match with MongoDB's natural order but safe to skip.")
    # TODO: should not rely on other test
    # NOTE(review): if a later one-argument ``test_utils_montyrestore``
    # definition shadows the two-argument one, this call raises TypeError
    # -- confirm which definition this module should keep.
    test_utils_montyrestore(monty_client, tmp_monty_utils_repo)
    with open_repo(tmp_monty_utils_repo):
        montydump(database, collection, BSON_DUMP)
        with open(BSON_DUMP, "rb") as dump:
            raw = dump.read()
        # The dump must round-trip to the known-good base64 fixture.
        assert base64.b64encode(raw) == BINARY
        # Remove the artifact so the repo stays clean, matching the
        # cleanup done by the sibling restore/import/export tests.
        os.remove(BSON_DUMP)
def test_utils_montyrestore(tmp_monty_repo):
    """Restore a BSON dump into a fresh repo and verify every document."""
    database = "dump_db_BSON"
    collection = "dump_col_BSON"
    # The fixture directory may not exist yet on the first run.
    if not os.path.isdir(tmp_monty_repo):
        os.makedirs(tmp_monty_repo)
    with open_repo(tmp_monty_repo):
        # Materialize the known-good dump fixture on disk.
        with open(BSON_DUMP, "wb") as dump:
            dump.write(base64.b64decode(BINARY))
        montyrestore(database, collection, BSON_DUMP)
        client = MontyClient()
        # Sort by _id so the comparison does not depend on the storage
        # engine's natural order (see the LMDB note in the dump test),
        # matching how the other restore/import tests iterate.
        cursor = client[database][collection].find(sort=[("_id", 1)])
        for i, doc in enumerate(cursor):
            assert doc == BSON.encode(DOCUMENTS[i]).decode()
        os.remove(BSON_DUMP)
def test_utils_montyimport(tmp_monty_repo):
    """Import a JSON dump into a fresh repo and verify every document."""
    database = "dump_db_JSON"
    collection = "dump_col_JSON"
    # The fixture directory may not exist yet on the first run.
    if not os.path.isdir(tmp_monty_repo):
        os.makedirs(tmp_monty_repo)
    with open_repo(tmp_monty_repo):
        # Materialize the serialized JSON fixture on disk.
        with open(JSON_DUMP, "w") as dump:
            dump.write(SERIALIZED)
        montyimport(database, collection, JSON_DUMP)
        client = MontyClient()
        # Sort by _id so the comparison does not depend on the storage
        # engine's natural order, matching the other restore/import tests.
        cursor = client[database][collection].find(sort=[("_id", 1)])
        for i, doc in enumerate(cursor):
            assert doc == BSON.encode(DOCUMENTS[i]).decode()
        os.remove(JSON_DUMP)
def test_utils_montyexport(monty_client, tmp_monty_utils_repo):
    """Export an imported collection to JSON and compare it to the fixture."""
    database = "dump_db_JSON"
    collection = "dump_col_JSON"
    # TODO: should not rely on other test
    # NOTE(review): if a later one-argument ``test_utils_montyimport``
    # definition shadows the two-argument one, this call raises TypeError
    # -- confirm which definition this module should keep.
    test_utils_montyimport(monty_client, tmp_monty_utils_repo)
    with open_repo(tmp_monty_utils_repo):
        montyexport(database, collection, JSON_DUMP)
        loaded_examples = list()
        loaded_exported = list()
        with open(JSON_DUMP, "r") as dump:
            data = dump.read().strip()
        exported_lines = data.split("\n")
        expected_lines = SERIALIZED.split("\n")
        # zip() silently truncates when the counts differ, which would let
        # a partial export pass -- check the line counts explicitly first.
        assert len(exported_lines) == len(expected_lines)
        for d, s in zip(exported_lines, expected_lines):
            loaded_exported.append(json_util.loads(d))
            loaded_examples.append(json_util.loads(s))
        # Sort both sides by _id so the comparison is order-independent
        # (the export order may follow the engine's natural order).
        sort = (lambda l: sorted(l, key=lambda i: i["_id"]))
        for d, s in zip(sort(loaded_exported), sort(loaded_examples)):
            assert d == s
        os.remove(JSON_DUMP)