def _delete_db(self):
    remove_lmdbm(self._name, False)
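# --- Hypothetical helper sketch (not part of the original script) ---
# run_bench() below relies on helpers such as MeasureTime, data, allkeys,
# randkeys and batch, which are assumed to be defined elsewhere in this
# module. The stand-ins below are only inferred from how they are used and
# are meant to document the expected behaviour, not to replace the originals.

import random
import time
from itertools import islice
from typing import Any, Dict, Iterable, Iterator, List, Tuple


class MeasureTime:
    """Context manager that reports the wall-clock time of the enclosed block."""

    def __enter__(self):
        self._start = time.perf_counter()
        self._end = None
        return self

    def __exit__(self, *exc_info):
        self._end = time.perf_counter()

    def get(self):
        # Elapsed seconds; usable both inside and after the with-block.
        end = self._end if self._end is not None else time.perf_counter()
        return end - self._start


def data(N: int) -> Iterator[Tuple[str, Dict[str, Any]]]:
    # N key/value pairs with small JSON-serialisable values.
    for i in range(N):
        yield "key_{}".format(i), {"index": i, "payload": "x" * 100}


def allkeys(N: int) -> Iterator[str]:
    # Keys in insertion order, for the sequential ("cont read") benchmark.
    return ("key_{}".format(i) for i in range(N))


def randkeys(k: int, N: int) -> Iterator[str]:
    # k keys drawn uniformly at random from the N inserted keys.
    return ("key_{}".format(random.randrange(N)) for _ in range(k))


def batch(it: Iterable, size: int) -> Iterator[List]:
    # Group an iterable into chunks of at most `size` items for bulk updates.
    it = iter(it)
    while True:
        chunk = list(islice(it, size))
        if not chunk:
            return
        yield chunk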
def run_bench(N, db_tpl) -> Dict[str, Dict[str, float]]:
    batchsize = 1000

    LMDBM_FILE = db_tpl.format("lmdbm")
    LMDBM_BATCH_FILE = db_tpl.format("lmdbm-batch")
    PYSOS_FILE = db_tpl.format("pysos")
    SQLITEDICT_FILE = db_tpl.format("sqlitedict")
    SQLITEDICT_BATCH_FILE = db_tpl.format("sqlitedict-batch")
    DBM_DUMB_FILE = db_tpl.format("dbm.dumb")
    DBM_GNU_FILE = db_tpl.format("dbm.gnu")
    SEMIDBM_FILE = db_tpl.format("semidbm")
    VEDIS_FILE = db_tpl.format("vedis")
    VEDIS_BATCH_FILE = db_tpl.format("vedis-batch")
    UNQLITE_FILE = db_tpl.format("unqlite")
    UNQLITE_BATCH_FILE = db_tpl.format("unqlite-batch")

    remove_lmdbm(LMDBM_FILE)
    remove_lmdbm(LMDBM_BATCH_FILE)
    with suppress(FileNotFoundError):
        os.unlink(PYSOS_FILE)
    with suppress(FileNotFoundError):
        os.unlink(SQLITEDICT_FILE)
    with suppress(FileNotFoundError):
        os.unlink(SQLITEDICT_BATCH_FILE)
    remove_dbm(DBM_DUMB_FILE)
    remove_semidbm(SEMIDBM_FILE)
    with suppress(FileNotFoundError):
        os.unlink(VEDIS_FILE)
    with suppress(FileNotFoundError):
        os.unlink(VEDIS_BATCH_FILE)
    with suppress(FileNotFoundError):
        os.unlink(UNQLITE_FILE)
    with suppress(FileNotFoundError):
        os.unlink(UNQLITE_BATCH_FILE)

    ret: DefaultDict[str, Dict[str, float]] = defaultdict(dict)

    # writes

    with MeasureTime() as t:
        with JsonLmdb.open(LMDBM_FILE, "c") as db:
            for k, v in data(N):
                db[k] = v
    ret["lmdbm"]["write"] = t.get()
    print("lmdbm write", N, t.get())

    with MeasureTime() as t:
        with JsonLmdb.open(LMDBM_BATCH_FILE, "c") as db:
            for pairs in batch(data(N), batchsize):
                db.update(pairs)
    ret["lmdbm-batch"]["write"] = t.get()
    print("lmdbm-batch write", N, t.get())

    with open(os.devnull, "w") as devnull:  # mute annoying "free lines" output
        with redirect_stdout(devnull):
            with MeasureTime() as t:
                db = pysos.Dict(PYSOS_FILE)
                for k, v in data(N):
                    db[k] = v
                db.close()
    ret["pysos"]["write"] = t.get()
    print("pysos write", N, t.get())

    with MeasureTime() as t:
        with SqliteDict(SQLITEDICT_FILE, autocommit=True) as db:
            for k, v in data(N):
                db[k] = v
    ret["sqlitedict"]["write"] = t.get()
    print("sqlitedict write", N, t.get())

    with MeasureTime() as t:
        with SqliteDict(SQLITEDICT_BATCH_FILE, autocommit=False) as db:
            for pairs in batch(data(N), batchsize):
                db.update(pairs)
            db.commit()
    ret["sqlitedict-batch"]["write"] = t.get()
    print("sqlitedict-batch write", N, t.get())

    with MeasureTime() as t:
        with dbm.dumb.open(DBM_DUMB_FILE, "c") as db:
            for k, v in data(N):
                db[k] = json.dumps(v)
    ret["dbm.dumb"]["write"] = t.get()
    print("dbm.dumb write", N, t.get())

    if gdbm:
        with MeasureTime() as t:
            with dbm.gnu.open(DBM_GNU_FILE, "c") as db:
                for k, v in data(N):
                    db[k] = json.dumps(v)
        ret["dbm.gnu"]["write"] = t.get()
        print("dbm.gnu write", N, t.get())

    with MeasureTime() as t:
        db = semidbm.open(SEMIDBM_FILE, "c")
        for k, v in data(N):
            db[k] = json.dumps(v)
        db.close()
    ret["semidbm"]["write"] = t.get()
    print("semidbm write", N, t.get())

    with MeasureTime() as t:
        with Vedis(VEDIS_FILE) as db:
            for k, v in data(N):
                db[k] = json.dumps(v)
    ret["vedis"]["write"] = t.get()
    print("vedis write", N, t.get())

    with MeasureTime() as t:
        with Vedis(VEDIS_BATCH_FILE) as db:
            for pairs in batch(data(N), batchsize):
                db.update({k: json.dumps(v) for k, v in pairs})
    ret["vedis-batch"]["write"] = t.get()
    print("vedis-batch write", N, t.get())

    with MeasureTime() as t:
        with UnQLite(UNQLITE_FILE) as db:
            for k, v in data(N):
                db[k] = json.dumps(v)
    ret["unqlite"]["write"] = t.get()
    print("unqlite write", N, t.get())

    with MeasureTime() as t:
        with UnQLite(UNQLITE_BATCH_FILE) as db:
            for pairs in batch(data(N), batchsize):
                db.update({k: json.dumps(v) for k, v in pairs})
    ret["unqlite-batch"]["write"] = t.get()
    print("unqlite-batch write", N, t.get())

    # reads

    with MeasureTime() as t:
        with JsonLmdb.open(LMDBM_FILE, "r") as db:
            for k in allkeys(N):
                db[k]
    # ret["lmdbm"]["read"] = t.get()
    print("lmdbm cont read", N, t.get())

    with MeasureTime() as t:
        with JsonLmdb.open(LMDBM_FILE, "r") as db:
            for k in randkeys(N, N):
                db[k]
    ret["lmdbm"]["read"] = t.get()
    print("lmdbm rand read", N, t.get())

    with open(os.devnull, "w") as devnull:  # mute annoying "free lines" output
        with redirect_stdout(devnull):
            with MeasureTime() as t:
                db = pysos.Dict(PYSOS_FILE)
                for k in randkeys(N, N):
                    db[k]
                db.close()
    ret["pysos"]["read"] = t.get()
    print("pysos read", N, t.get())

    with MeasureTime() as t:
        with SqliteDict(SQLITEDICT_FILE) as db:
            for k in randkeys(N, N):
                db[k]
    ret["sqlitedict"]["read"] = t.get()
    print("sqlitedict read", N, t.get())

    with MeasureTime() as t:
        with dbm.dumb.open(DBM_DUMB_FILE, "r") as db:
            for k in randkeys(N, N):
                json.loads(db[k])
    ret["dbm.dumb"]["read"] = t.get()
    print("dbm.dumb read", N, t.get())

    if gdbm:
        with MeasureTime() as t:
            with dbm.gnu.open(DBM_GNU_FILE, "r") as db:
                for k in randkeys(N, N):
                    json.loads(db[k])
        ret["dbm.gnu"]["read"] = t.get()
        print("dbm.gnu read", N, t.get())

    with MeasureTime() as t:
        db = semidbm.open(SEMIDBM_FILE, "r")
        for k in randkeys(N, N):
            json.loads(db[k])
        db.close()
    ret["semidbm"]["read"] = t.get()
    print("semidbm read", N, t.get())

    with MeasureTime() as t:
        with Vedis(VEDIS_FILE) as db:
            for k in randkeys(N, N):
                json.loads(db[k])
    ret["vedis"]["read"] = t.get()
    print("vedis read", N, t.get())

    with MeasureTime() as t:
        with UnQLite(UNQLITE_FILE) as db:
            for k in randkeys(N, N):
                json.loads(db[k])
    ret["unqlite"]["read"] = t.get()
    print("unqlite read", N, t.get())

    return ret
def run_bench(N, db_tpl):
    batchsize = 1000

    LMDBM_FILE = db_tpl.format("lmdbm")
    PYSOS_FILE = db_tpl.format("pysos")
    SQLITEDICT_FILE = db_tpl.format("sqlitedict")
    DBM_FILE = db_tpl.format("dbm")

    remove_lmdbm(LMDBM_FILE)
    with suppress(FileNotFoundError):
        os.unlink(PYSOS_FILE)
    with suppress(FileNotFoundError):
        os.unlink(SQLITEDICT_FILE)
    remove_dbm(DBM_FILE)

    ret = defaultdict(dict)

    # writes

    """
    # without batch
    with PrintStatementTime("lmdbm (no batch) {} writes: {{delta:.02f}}".format(N)):
        db = JsonLmdb.open(LMDBM_FILE, "c")
        for k, v in data(N):
            db[k] = v
        db.close()
    remove_lmdbm(LMDBM_FILE)
    """

    with MeasureTime() as t:
        with JsonLmdb.open(LMDBM_FILE, "c") as db:
            for pairs in batch(data(N), batchsize):
                db.update(pairs)
    ret["lmdbm"]["write"] = t.get()
    print("lmdbm batch write", N, t.get())

    with open(os.devnull, "w") as devnull:  # mute annoying "free lines" output
        with redirect_stdout(devnull):
            with MeasureTime() as t:
                db = pysos.Dict(PYSOS_FILE)
                for k, v in data(N):
                    db[k] = v
                db.close()
    ret["pysos"]["write"] = t.get()
    print("pysos write", N, t.get())

    with MeasureTime() as t:
        with SqliteDict(SQLITEDICT_FILE) as db:
            for pairs in batch(data(N), batchsize):
                db.update(pairs)
            db.commit()
    ret["sqlitedict"]["write"] = t.get()
    print("sqlitedict batch write", N, t.get())

    with MeasureTime() as t:
        with dbm.open(DBM_FILE, "c") as db:
            for k, v in data(N):
                db[k] = json.dumps(v)
    ret["dbm"]["write"] = t.get()
    print("dbm write", N, t.get())

    # reads

    with MeasureTime() as t:
        with JsonLmdb.open(LMDBM_FILE, "r") as db:
            for k in allkeys(N):
                db[k]
    # ret["lmdbm"]["read"] = t.get()
    print("lmdbm cont read", N, t.get())

    with MeasureTime() as t:
        with JsonLmdb.open(LMDBM_FILE, "r") as db:
            for k in randkeys(N, N):
                db[k]
    ret["lmdbm"]["read"] = t.get()
    print("lmdbm rand read", N, t.get())

    with open(os.devnull, "w") as devnull:  # mute annoying "free lines" output
        with redirect_stdout(devnull):
            with MeasureTime() as t:
                db = pysos.Dict(PYSOS_FILE)
                for k in randkeys(N, N):
                    db[k]
                db.close()
    ret["pysos"]["read"] = t.get()
    print("pysos read", N, t.get())

    with MeasureTime() as t:
        with SqliteDict(SQLITEDICT_FILE) as db:
            for k in randkeys(N, N):
                db[k]
    ret["sqlitedict"]["read"] = t.get()
    print("sqlitedict read", N, t.get())

    with MeasureTime() as t:
        with dbm.open(DBM_FILE, "r") as db:
            for k in randkeys(N, N):
                json.loads(db[k])
    ret["dbm"]["read"] = t.get()
    print("dbm read", N, t.get())

    return ret
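# --- Hypothetical usage sketch (not in the original) ---
# One way the benchmark might be driven: the "{}" placeholder in db_tpl is
# filled with each backend name to build per-backend database paths, and the
# timings returned by run_bench() are dumped as JSON. The sizes chosen here
# are illustrative only; json and os are assumed to be imported at the top
# of the module, as the functions above already rely on them.
if __name__ == "__main__":
    results = {}
    for n in (10**4, 10**5):
        results[n] = run_bench(n, "bench-{}.db")
    print(json.dumps(results, indent=2))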