import json
import sqlite3
from os import remove

# Transaction, create, SQL_CREATE and SQL_CREATE_LEGACY are expected to come
# from the project's own modules (e.g. the bismuthcore Transaction class and
# the schema helpers used elsewhere in this repo).


def insert_legacy_object(tx_list):
    test_legacy = sqlite3.connect('file:ledger_legacy?mode=memory', uri=True, timeout=1)
    create(test_legacy, SQL_CREATE_LEGACY)
    for tx in tx_list:
        # Create an instance from the legacy tuple data, copying to inner
        # properties (converts to binary as well)
        tx = Transaction.from_legacy(tx)
        # Then export again as a legacy tuple
        test_legacy.execute("INSERT INTO transactions VALUES (?,?,?,?,?,?,?,?,?,?,?,?)",
                            tx.to_tuple())
    return test_legacy
def insert_new(tx_list):
    # Use an in-RAM DB to avoid disk I/O artefacts
    test_new = sqlite3.connect('file:ledger_new?mode=memory', uri=True, timeout=1)
    create(test_new, SQL_CREATE)
    for tx in tx_list:
        # Create an instance from the legacy tuple data, copying to inner properties
        tx = Transaction.from_legacy(tx)
        # Then convert to binary and insert as a bin tuple
        test_new.execute("INSERT INTO transactions VALUES (?,?,?,?,?,?,?,?,?,?,?,?)",
                         tx.to_bin_tuple(sqlite_encode=True))
    return test_new
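# A minimal usage sketch, not part of the original script: timing the two
# insert paths against each other with the stdlib timeit clock. The
# load_sample_rows and compare_insert_paths helpers are hypothetical names
# for illustration; any iterable of legacy transaction tuples works.


def load_sample_rows(db_path, limit=10000):
    # Hypothetical helper: pull legacy tuples from an on-disk ledger.
    with sqlite3.connect(db_path, timeout=1) as db:
        db.text_factory = str
        return db.execute(
            "SELECT * FROM transactions LIMIT ?", (limit,)
        ).fetchall()


def compare_insert_paths(rows):
    # Hypothetical helper: run each insert path once and report wall time.
    from timeit import default_timer as timer
    for label, func in (("legacy", insert_legacy_object), ("new", insert_new)):
        start = timer()
        db = func(rows)
        db.close()
        print(f"{label}: {timer() - start:.3f}s for {len(rows)} transactions")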
if __name__ == "__main__":
    # Start from a clean slate; ignore the files if they do not exist yet.
    try:
        remove('tx_dataset.json')
    except FileNotFoundError:
        pass
    try:
        remove('tx_tuple_dataset.json')
    except FileNotFoundError:
        pass
    # TODO: use a default path and take a custom db path from the command line
    # for more flexible use depending on context
    with sqlite3.connect('../../../Bismuth-temp/static/ledger.db', timeout=1) as ledger:
        ledger.text_factory = str
        # Export the sample transactions as one JSON object per line
        res = ledger.execute(
            "SELECT * FROM transactions WHERE block_height > 700000 LIMIT 100000"
        )
        with open("tx_dataset.json", "w") as fp:
            for row in res:
                tx = Transaction.from_legacy(row)
                fp.write(tx.to_json() + "\n")
        # Same sample, exported as legacy tuples
        res = ledger.execute(
            "SELECT * FROM transactions WHERE block_height > 700000 LIMIT 100000"
        )
        with open("tx_tuple_dataset.json", "w") as fp:
            for row in res:
                tx = Transaction.from_legacy(row)
                fp.write(json.dumps(tx.to_tuple()) + "\n")
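
# A small sketch of consuming the exported datasets, under the assumption
# that each file holds one JSON document per line (as written above) and
# that Transaction.from_legacy accepts the decoded tuple back.
# load_tuple_dataset is a hypothetical name for illustration.


def load_tuple_dataset(path="tx_tuple_dataset.json"):
    # Rebuild Transaction objects from the exported legacy tuples.
    with open(path) as fp:
        return [Transaction.from_legacy(tuple(json.loads(line))) for line in fp]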