def flush(self, update_state=True):
    """
    Flushes the internal queue and resets scratch. If update_state is True, this also applies the queued
    transactions' queries against state.
    """
    self.log.info("Flushing queue with update_state={} for {} items".format(update_state, len(self.queue)))

    if update_state:
        self.log.debug("Updating state...")
        queries = list(itertools.chain(*[row[1:] for row in self.queue]))
        self.log.info("got queries to execute: {}".format(queries))

        with DB() as db:
            for q in queries:
                qt = q.compile(compile_kwargs={'literal_binds': True})
                self.log.debug("executing query {}".format(qt))
                db.execute(q)

        # TODO -- implement
        # for query in self.queue:
        #     tables.db.execute(query)
        self.log.debug("Done updating state")

    # Drop scratch
    with DB() as db:
        # NOTE -- this just drops the scratch version of 'balances' for now. If interpretation of tx's were to
        # operate on other tables (and consequently other scratch tables), these would need to be dropped as well.
        q = delete(db.tables.mapping[db.tables.balances])
        self.log.critical("attempting to execute query {}".format(q))
        db.execute(q)

    self.queue.clear()

def test_creates_new_db_only_once(self):
    """
    Tests that a DB instance is only created once for each process.
    """
    pid = os.getpid()
    lock1, lock2 = None, None

    # Dict of DB instances before we call DB()
    before_creation = DBSingletonMeta._instances.copy()

    with DB() as db:
        lock1 = db.lock

    after_creation1 = DBSingletonMeta._instances.copy()
    self.assertTrue(len(after_creation1) == len(before_creation) + 1)
    self.assertTrue(pid in after_creation1)

    # Invoke a second time ... this should not create anything new, and should return the same lock
    with DB() as db:
        lock2 = db.lock

    after_creation2 = DBSingletonMeta._instances.copy()
    self.assertTrue(len(after_creation1) == len(after_creation2))
    self.assertEqual(lock1, lock2)

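# Hedged sketch (not the actual cilantro implementation): the tests above assume a metaclass that caches one
# DB instance per OS process, keyed by PID, and exposes that cache as `_instances`. A minimal version of the
# pattern might look like this; the class names here (ProcessSingletonMeta, FakeDB) are illustrative only.
import os
import threading


class ProcessSingletonMeta(type):
    _instances = {}  # pid -> instance

    def __call__(cls, *args, **kwargs):
        pid = os.getpid()
        if pid not in cls._instances:
            cls._instances[pid] = super().__call__(*args, **kwargs)
        return cls._instances[pid]


class FakeDB(metaclass=ProcessSingletonMeta):
    def __init__(self):
        self.lock = threading.Lock()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        return False


if __name__ == '__main__':
    with FakeDB() as db1, FakeDB() as db2:
        # Same process -> same instance and same lock, as the test above expects
        assert db1 is db2 and db1.lock is db2.lock
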
def start_mn():
    log = get_logger("MasternodeFactory")
    log.critical("\n***Starting Masternode\n")

    DB.set_context('{}_masternode'.format(DB_NAME))
    with DB(should_reset=True) as db:
        pass

    NodeFactory.run_masternode()

def start_witness(i):
    log = get_logger("WitnessFactory")

    DB.set_context('{}_witness_{}'.format(DB_NAME, i))
    with DB(should_reset=True) as db:
        pass

    w_info = Constants.Testnet.Witnesses[i]
    log.critical("\n\nStarting witness on slot {} with info {}\n\n".format(i, w_info))
    NodeFactory.run_witness(signing_key=w_info['sk'], ip=w_info['url'], name="Witness_{}".format(i + 1))

def get_block(cls, number: int = 0, hash: str = '') -> dict or None:
    """
    Retrieves a block by its hash, or autoincrement number. Returns a dictionary with a key for each column in
    the blocks table. Returns None if no block with the specified hash/number is found.

    :param number: The number of the block to fetch. The genesis block is number 1, the first 'real' block is
    number 2, and so on.
    :param hash: The hash of the block to look up. Must be a valid 64 char hex string.
    :return: A dictionary containing a key for each column in the blocks table.
    """
    assert bool(number > 0) ^ bool(hash), "Either 'number' XOR 'hash' arg must be given"

    with DB() as db:
        blocks = db.tables.blocks

        if number > 0:
            block = blocks.select().where(blocks.number == number).run(db.ex)
            return cls._decode_block(block[0]) if block else None
        elif hash:
            assert is_valid_hex(hash, length=64), "Invalid block hash {}".format(hash)
            block = blocks.select().where(blocks.hash == hash).run(db.ex)
            return cls._decode_block(block[0]) if block else None

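# Hedged usage sketch for get_block(), based only on the signature and docstring above, and assuming the
# classmethod lives on the BlockStorageDriver class shown elsewhere in this code. The 64-char hash below is a
# made-up placeholder, so that lookup is expected to return None.
genesis = BlockStorageDriver.get_block(number=1)
if genesis is not None:
    print("genesis hash:", genesis['hash'])

maybe_block = BlockStorageDriver.get_block(hash='ab' * 32)  # placeholder hash
print("found block by hash:", maybe_block is not None)
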
def interpret_transaction(self, tx):
    """
    Interprets the transaction, and updates scratch/balance state as necessary.
    If any validation fails (e.g. insufficient balance), this method will raise an exception.

    :param tx: A TestNetTransaction object to interpret
    """
    self.log.debug("Interpreting tx {}".format(tx))
    assert issubclass(type(tx), TransactionBase), \
        "Transaction type {} is not a subclass of TransactionBase".format(type(tx))

    queries = tx.interpret(compile_deltas=False)

    if not queries:
        self.log.error("\n!!! Error interpreting tx {}\n".format(tx))
        return

    self.log.debug("Got queries {} for tx {}".format(queries, tx))
    self.queue.append((tx, *queries))

    for q in queries:
        self.log.debug("About to get scratch query for query {}".format(q))
        scratch_q = ScratchCloningVisitor().traverse(q)

        # TODO -- move the context manager outside of this loop
        with DB() as db:
            if scratch_q.__class__ == Update:
                scratch_q.table = db.tables.mapping[scratch_q.table]
            db.execute(scratch_q)

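# Hedged illustration of the queue layout shared by interpret_transaction() and flush() above:
# interpret_transaction() appends (tx, *queries) tuples, and flush() flattens everything after the tx
# with itertools.chain. Plain strings stand in for the real transaction and query objects here.
import itertools

queue = [("tx1", "q1a", "q1b"), ("tx2", "q2a")]
queries = list(itertools.chain(*[row[1:] for row in queue]))
assert queries == ["q1a", "q1b", "q2a"]  # the tx itself is dropped; only its queries are replayed
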
def start_delegate(i):
    log = get_logger("DelegateFactory")

    db_name = DB_NAME + '_delegate_' + str(i)
    d_info = Constants.Testnet.Delegates[i]
    log.critical("\n***Instantiating a new delegate on slot {} with db name: {}, and info {}\n"
                 .format(i, db_name, d_info))

    DB.set_context(db_name)
    with DB(should_reset=True) as db:
        pass

    NodeFactory.run_delegate(ip=d_info['url'], signing_key=d_info['sk'], name="Delegate_{}".format(i + 1))

def get_latest_block(cls) -> dict:
    """
    Retrieves the latest block published in the chain.
    :return: A dictionary representing the latest block, containing a key for each column in the blocks table.
    """
    with DB() as db:
        latest = db.tables.blocks.select().order_by('number', desc=True).limit(1).run(db.ex)
        assert latest, "No blocks found! There should be a genesis. Was the database properly seeded?"
        return cls._decode_block(latest[0])

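# Hedged usage sketch: walking the chain backwards from the latest block via its 'prev_block_hash' field,
# using only get_latest_block() and get_block() as documented above. Assumes each decoded block dict exposes
# 'number', 'hash', and 'prev_block_hash' keys (as built in store_block elsewhere in this code).
block = BlockStorageDriver.get_latest_block()
while block:
    print(block['number'], block['hash'])
    prev_hash = block.get('prev_block_hash')
    if not prev_hash or not is_valid_hex(prev_hash, length=64):
        break  # reached genesis, or a block without a resolvable parent
    block = BlockStorageDriver.get_block(hash=prev_hash)
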
def test_creates_new_db(self):
    """
    Tests that a new instance of DB is lazily created when DB() is invoked.
    """
    pid = os.getpid()

    # Dict of DB instances before we call DB()
    before_creation = DBSingletonMeta._instances.copy()

    with DB() as db:
        pass

    after_creation = DBSingletonMeta._instances

    self.assertTrue(len(after_creation) == len(before_creation) + 1)
    self.assertTrue(pid in after_creation)

def run_mn():
    from cilantro.logger import get_logger
    from cilantro import Constants
    from cilantro.nodes import NodeFactory
    from cilantro.db import DB, DB_NAME
    import os

    log = get_logger("MASTERNODE FACTORY")

    with DB('{}_masternode'.format(DB_NAME), should_reset=True) as db:
        pass

    ip = os.getenv('HOST_IP')  # Constants.Testnet.Masternodes[0]['ip']
    sk = Constants.Testnet.Masternodes[0]['sk']

    log.critical("\n\n\nMASTERNODE BOOTING WITH IP {} AND SK {}".format(ip, sk))
    NodeFactory.run_masternode(ip=ip, signing_key=sk)

def run_mn():
    from cilantro.logger import get_logger
    from cilantro import Constants
    from cilantro.nodes import NodeFactory
    from cilantro.db import DB, DB_NAME
    import os, time

    log = get_logger("MASTERNODE FACTORY")

    sk = Constants.Testnet.Masternode.Sk
    url = 'tcp://{}:{}'.format(os.getenv('HOST_IP'), Constants.Testnet.Masternode.InternalUrl[-4:])

    with DB('{}'.format(DB_NAME), should_reset=True) as db:
        pass

    log.critical("\n\n\nMASTERNODE BOOTING WITH URL {} AND SK {}".format(url, sk))
    mn = NodeFactory.run_masternode(ip=url, signing_key=sk)

def run_witness(slot_num):
    from cilantro.logger import get_logger
    from cilantro import Constants
    from cilantro.nodes import NodeFactory
    from cilantro.db import DB, DB_NAME
    import os

    log = get_logger("WITNESS FACTORY")

    with DB('{}_witness_{}'.format(DB_NAME, slot_num), should_reset=True) as db:
        pass

    w_info = Constants.Testnet.Witnesses[slot_num]
    w_info['ip'] = os.getenv('HOST_IP')

    log.critical("Building witness on slot {} with info {}".format(slot_num, w_info))
    NodeFactory.run_witness(ip=w_info['ip'], signing_key=w_info['sk'])

def _get_latest_block_hash(cls) -> str:
    """
    Looks into the DB, and returns the latest block's hash. If the latest block hash is for whatever reason
    invalid (i.e. not a valid 64 char hex string), then this method will raise an assertion.

    :return: A string representing the latest (most recent) block's hash
    :raises: An assertion if the latest block hash is not valid 64 character hex. If this happens, something was
    seriously messed up in the block storage process.
    """
    with DB() as db:
        row = db.tables.blocks.select().order_by('number', desc=True).limit(1).run(db.ex)[0]
        last_hash = row['hash']

        assert is_valid_hex(last_hash, length=64), "Latest block hash is invalid 64 char hex! Got {}".format(last_hash)

        return last_hash

def run_delegate(slot_num):
    from cilantro.logger import get_logger
    from cilantro import Constants
    from cilantro.nodes import NodeFactory
    from cilantro.db import DB, DB_NAME
    import os

    log = get_logger("DELEGATE FACTORY")

    d_info = Constants.Testnet.Delegates[slot_num]
    d_info['ip'] = os.getenv('HOST_IP')

    # Set default database name for this instance
    with DB('{}_delegate_{}'.format(DB_NAME, slot_num), should_reset=True) as db:
        pass

    log.critical("Building delegate on slot {} with info {}".format(slot_num, d_info))
    NodeFactory.run_delegate(ip=d_info['ip'], signing_key=d_info['sk'])

def run_delegate(slot_num):
    from cilantro.logger import get_logger
    from cilantro import Constants
    from cilantro.nodes import NodeFactory
    from cilantro.db import DB, DB_NAME
    import os

    log = get_logger("DELEGATE FACTORY")

    d_info = Constants.Testnet.Delegates[slot_num]
    port = d_info['url'][-4:]
    d_info['url'] = 'tcp://{}:{}'.format(os.getenv('HOST_IP'), port)

    with DB('{}_delegate_{}'.format(DB_NAME, slot_num), should_reset=True) as db:
        pass

    log.critical("Building delegate on slot {} with info {}".format(slot_num, d_info))
    NodeFactory.run_delegate(ip=d_info['url'], signing_key=d_info['sk'])

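# Hedged helper sketch of the URL-rewrite trick used in run_mn()/run_delegate() above: keep the configured
# port (assumed, as in the code above, to be the last 4 characters of the url) but swap the host for this
# container's HOST_IP. Purely illustrative; `rebind_to_host_ip` is not part of the project.
import os


def rebind_to_host_ip(url: str, default_ip: str = '127.0.0.1') -> str:
    port = url[-4:]  # the snippets above assume a fixed 4-digit port suffix
    host = os.getenv('HOST_IP', default_ip)
    return 'tcp://{}:{}'.format(host, port)

# e.g. rebind_to_host_ip('tcp://10.0.0.5:8080') -> 'tcp://<HOST_IP>:8080'
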
def get_raw_transactions_from_block(cls, block_hash: str) -> List[bytes] or None:
    """
    Retrieves a list of raw transactions associated with a particular block. Returns None if no block with
    the given hash can be found.

    :param block_hash: The hash of the block whose transactions should be fetched
    :return: A list of raw transactions, each as bytes, or None if no block with the given hash can be found
    """
    assert is_valid_hex(block_hash, length=64), \
        "Expected block_hash to be 64 char hex str, not {}".format(block_hash)

    with DB() as db:
        transactions = db.tables.transactions
        rows = transactions.select().where(transactions.block_hash == block_hash).run(db.ex)

        if not rows:
            return None
        else:
            return [decode_tx(row['data']) for row in rows]

def get_raw_transaction(cls, tx_hash: str) -> bytes or None:
    """
    Retrieves a single raw transaction from its hash. Returns None if no transaction for that hash can be found.

    :param tx_hash: The hash of the raw transaction to look up. Should be a 64 character hex string.
    :return: The raw transaction as bytes, or None if no transaction with that hash can be found
    """
    assert is_valid_hex(tx_hash, length=64), "Expected tx_hash to be 64 char hex str, not {}".format(tx_hash)

    with DB() as db:
        transactions = db.tables.transactions
        rows = transactions.select().where(transactions.hash == tx_hash).run(db.ex)

        if not rows:
            return None
        else:
            assert len(rows) == 1, "Multiple transactions found with hash {}! BIG TIME DEVELOPMENT ERROR!!!" \
                .format(tx_hash)
            return decode_tx(rows[0]['data'])

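# Hedged usage sketch for the two raw-transaction getters above, assuming they are classmethods on
# BlockStorageDriver as elsewhere in this code. The tx hash is a made-up placeholder; both getters return
# None when nothing matches.
raw_tx = BlockStorageDriver.get_raw_transaction(tx_hash='cd' * 32)  # placeholder hash
if raw_tx is None:
    print("no transaction with that hash")

latest = BlockStorageDriver.get_latest_block()
txs = BlockStorageDriver.get_raw_transactions_from_block(block_hash=latest['hash']) or []
print("latest block holds {} transactions".format(len(txs)))
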
class BlockStorageDriver:
    """
    This class provides a high level functional API for storing/retrieving blockchain data. It interfaces with the
    database under the hood using the process-specific DB Singleton. This allows all methods on this class to be
    implemented as class methods, since database cursors are provided via the Singleton instead of stored as
    properties on the BlockStorageDriver class/instance.
    """

    def __init__(self):
        raise NotImplementedError("Do not instantiate this class! Instead, use the class methods.")

    @classmethod
    def store_block(cls, block_contender: BlockContender, raw_transactions: List[bytes], publisher_sk: str,
                    timestamp: int = 0):
        """
        Persist a new block to the blockchain, along with the raw transactions associated with the block. An
        exception will be raised if an error occurs either validating the new block data, or storing the block.
        Thus, it is recommended that this method is wrapped in a try block.

        :param block_contender: A BlockContender instance
        :param raw_transactions: A list of ordered raw transactions contained in the block
        :param publisher_sk: The signing key of the publisher (a Masternode) who is publishing the block
        :param timestamp: The time the block was published, in unix epoch time. If 0, time.time() is used
        :return: None
        :raises: An assertion error if invalid args are passed into this function, or a
         BlockStorageValidationException if validation fails on the attempted block

        TODO -- think really hard and make sure that this is 'collision proof' (extremely unlikely, but still possible)
        - could there be a hash collision in the Merkle tree nodes?
        - hash collision in block hash space?
        - hash collision in transaction space?
        """
        assert isinstance(block_contender, BlockContender), "Expected block_contender arg to be BlockContender instance"
        assert is_valid_hex(publisher_sk, 64), "Invalid signing key {}. Expected 64 char hex str".format(publisher_sk)

        if not timestamp:
            timestamp = int(time.time())

        tree = MerkleTree.from_raw_transactions(raw_transactions)

        publisher_vk = ED25519Wallet.get_vk(publisher_sk)
        publisher_sig = ED25519Wallet.sign(publisher_sk, tree.root)

        # Build and validate block_data
        block_data = {
            'block_contender': block_contender,
            'timestamp': timestamp,
            'merkle_root': tree.root_as_hex,
            'merkle_leaves': tree.leaves_as_concat_hex_str,
            'prev_block_hash': cls._get_latest_block_hash(),
            'masternode_signature': publisher_sig,
            'masternode_vk': publisher_vk,
        }
        cls._validate_block_data(block_data)

        # Compute block hash
        block_hash = cls._compute_block_hash(block_data)

        # Encode block data for serialization and finally persist the data
        log.info("Attempting to persist new block with hash {}".format(block_hash))
        block_data = cls._encode_block(block_data)

        with DB() as db:
            # Store block
            res = db.tables.blocks.insert([{'hash': block_hash, **block_data}]).run(db.ex)
            if res:
                log.info("Successfully inserted new block with number {} and hash {}"
                         .format(res['last_row_id'], block_hash))
            else:
                log.error("Error inserting block! Got None/False result back from insert query. Result={}".format(res))
                return

            # Store raw transactions
            log.info("Attempting to store {} raw transactions associated with block hash {}"
                     .format(len(raw_transactions), block_hash))
            tx_rows = [{'hash': Hasher.hash(raw_tx), 'data': encode_tx(raw_tx), 'block_hash': block_hash}
                       for raw_tx in raw_transactions]

            res = db.tables.transactions.insert(tx_rows).run(db.ex)
            if res:
                log.info("Successfully inserted {} transactions".format(res['row_count']))
            else:
                log.error("Error inserting raw transactions! Got None from insert query. Result={}".format(res))

    @classmethod
    def get_block(cls, number: int = 0, hash: str = '') -> dict or None:
        """
        Retrieves a block by its hash, or autoincrement number. Returns a dictionary with a key for each column in
        the blocks table. Returns None if no block with the specified hash/number is found.

        :param number: The number of the block to fetch. The genesis block is number 1, the first 'real' block is
        number 2, and so on.
        :param hash: The hash of the block to look up. Must be a valid 64 char hex string.
        :return: A dictionary containing a key for each column in the blocks table.
        """
        assert bool(number > 0) ^ bool(hash), "Either 'number' XOR 'hash' arg must be given"

        with DB() as db:
            blocks = db.tables.blocks

            if number > 0:
                block = blocks.select().where(blocks.number == number).run(db.ex)
                return cls._decode_block(block[0]) if block else None
            elif hash:
                assert is_valid_hex(hash, length=64), "Invalid block hash {}".format(hash)
                block = blocks.select().where(blocks.hash == hash).run(db.ex)
                return cls._decode_block(block[0]) if block else None

    @classmethod
    def get_latest_block(cls) -> dict:
        """
        Retrieves the latest block published in the chain.
        :return: A dictionary representing the latest block, containing a key for each column in the blocks table.
        """
        with DB() as db:
            latest = db.tables.blocks.select().order_by('number', desc=True).limit(1).run(db.ex)
            assert latest, "No blocks found! There should be a genesis. Was the database properly seeded?"
            return cls._decode_block(latest[0])

    @classmethod
    def get_raw_transaction(cls, tx_hash: str) -> bytes or None:
        """
        Retrieves a single raw transaction from its hash. Returns None if no transaction for that hash can be found.

        :param tx_hash: The hash of the raw transaction to look up. Should be a 64 character hex string.
        :return: The raw transaction as bytes, or None if no transaction with that hash can be found
        """
        assert is_valid_hex(tx_hash, length=64), "Expected tx_hash to be 64 char hex str, not {}".format(tx_hash)

        with DB() as db:
            transactions = db.tables.transactions
            rows = transactions.select().where(transactions.hash == tx_hash).run(db.ex)

            if not rows:
                return None
            else:
                assert len(rows) == 1, "Multiple transactions found with hash {}! BIG TIME DEVELOPMENT ERROR!!!" \
                    .format(tx_hash)
                return decode_tx(rows[0]['data'])

    @classmethod
    def get_raw_transactions_from_block(cls, block_hash: str) -> List[bytes] or None:
        """
        Retrieves a list of raw transactions associated with a particular block. Returns None if no block with
        the given hash can be found.

        :param block_hash: The hash of the block whose transactions should be fetched
        :return: A list of raw transactions, each as bytes, or None if no block with the given hash can be found
        """
        assert is_valid_hex(block_hash, length=64), \
            "Expected block_hash to be 64 char hex str, not {}".format(block_hash)

        with DB() as db:
            transactions = db.tables.transactions
            rows = transactions.select().where(transactions.block_hash == block_hash).run(db.ex)

            if not rows:
                return None
            else:
                return [decode_tx(row['data']) for row in rows]

    @classmethod
    def validate_blockchain(cls, async=False):
        """
        Validates the cryptographic integrity of the blockchain. See spec in docs folder for details on what defines
        a valid blockchain structure.

        # TODO docstring

        :param async: If true, run this in a separate process
        :raises: An exception if validation fails
        """
        start = time.time()

        if async:
            raise NotImplementedError()

        with DB() as db:
            blocks = db.tables.blocks.select().order_by('number', desc=False).run(db.ex)
            assert blocks, "No blocks found! There should be a genesis. Was the database properly seeded?"

            for i in range(len(blocks) - 1):
                cls._validate_block_link(cls._decode_block(blocks[i]), cls._decode_block(blocks[i + 1]))

        log.info("Blockchain validation completed successfully in {} seconds.".format(round(time.time() - start, 2)))

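# Hedged usage sketch of the BlockStorageDriver API above. `contender`, `raw_txs`, and `MN_SK` are
# placeholders for a real BlockContender, its ordered raw transactions, and the masternode signing key;
# store_block() is wrapped in try/except as its docstring recommends.
try:
    BlockStorageDriver.store_block(block_contender=contender,
                                   raw_transactions=raw_txs,
                                   publisher_sk=MN_SK)
except Exception as e:
    log.error("Failed to store block: {}".format(e))
else:
    BlockStorageDriver.validate_blockchain()  # raises if chain links or hashes are inconsistent
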
def _run_db_proc(shared_mem):
    with DB() as db:
        pid = os.getpid()
        shared_mem[pid] = id(db.lock)

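# Hedged sketch of how _run_db_proc() above is presumably used in tests: spawn a few child processes, let
# each record the id() of its DB lock in a shared dict keyed by PID, and check that one entry shows up per
# process (i.e. the DB singleton is per-process, not global). The helper name below is illustrative only.
from multiprocessing import Manager, Process


def _check_per_process_locks(num_procs=3):
    with Manager() as m:
        shared_mem = m.dict()
        procs = [Process(target=_run_db_proc, args=(shared_mem,)) for _ in range(num_procs)]
        for p in procs:
            p.start()
        for p in procs:
            p.join()

        lock_ids = dict(shared_mem)
        assert len(lock_ids) == num_procs  # one entry per child PID
        return lock_ids
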
    p = {
        "policy": name,
        "type": "multi_discrete",
        "last_election": 0,
        "election_length": 168,
        "election_frequency": 336,
        "max_votes": 0,
        "value": payload,
        "round": 0
    }
    return p


j = json.load(open(os.path.join(os.path.dirname(__file__), 'constitution.json')))

with DB('{}_{}'.format(DB_NAME, 0)) as db:
    masternodes = []
    delegates = []
    witnesses = []

    # Add state for tables other than masternodes, delegates, and witnesses, as those get treated differently
    for k in j.keys():
        for item in j[k]:
            if k != 'masternodes' and k != 'delegates' and k != 'witnesses':
                t = getattr(db.tables, k)
                db.execute(t.insert(item))
            elif k == 'masternodes':
                masternodes.append(item)
            elif k == 'delegates':
                delegates.append(item)

def store_block(cls, block_contender: BlockContender, raw_transactions: List[bytes], publisher_sk: str,
                timestamp: int = 0):
    """
    Persist a new block to the blockchain, along with the raw transactions associated with the block. An exception
    will be raised if an error occurs either validating the new block data, or storing the block. Thus, it is
    recommended that this method is wrapped in a try block.

    :param block_contender: A BlockContender instance
    :param raw_transactions: A list of ordered raw transactions contained in the block
    :param publisher_sk: The signing key of the publisher (a Masternode) who is publishing the block
    :param timestamp: The time the block was published, in unix epoch time. If 0, time.time() is used
    :return: None
    :raises: An assertion error if invalid args are passed into this function, or a BlockStorageValidationException
     if validation fails on the attempted block

    TODO -- think really hard and make sure that this is 'collision proof' (extremely unlikely, but still possible)
    - could there be a hash collision in the Merkle tree nodes?
    - hash collision in block hash space?
    - hash collision in transaction space?
    """
    assert isinstance(block_contender, BlockContender), "Expected block_contender arg to be BlockContender instance"
    assert is_valid_hex(publisher_sk, 64), "Invalid signing key {}. Expected 64 char hex str".format(publisher_sk)

    if not timestamp:
        timestamp = int(time.time())

    tree = MerkleTree.from_raw_transactions(raw_transactions)

    publisher_vk = ED25519Wallet.get_vk(publisher_sk)
    publisher_sig = ED25519Wallet.sign(publisher_sk, tree.root)

    # Build and validate block_data
    block_data = {
        'block_contender': block_contender,
        'timestamp': timestamp,
        'merkle_root': tree.root_as_hex,
        'merkle_leaves': tree.leaves_as_concat_hex_str,
        'prev_block_hash': cls._get_latest_block_hash(),
        'masternode_signature': publisher_sig,
        'masternode_vk': publisher_vk,
    }
    cls._validate_block_data(block_data)

    # Compute block hash
    block_hash = cls._compute_block_hash(block_data)

    # Encode block data for serialization and finally persist the data
    log.info("Attempting to persist new block with hash {}".format(block_hash))
    block_data = cls._encode_block(block_data)

    with DB() as db:
        # Store block
        res = db.tables.blocks.insert([{'hash': block_hash, **block_data}]).run(db.ex)
        if res:
            log.info("Successfully inserted new block with number {} and hash {}"
                     .format(res['last_row_id'], block_hash))
        else:
            log.error("Error inserting block! Got None/False result back from insert query. Result={}".format(res))
            return

        # Store raw transactions
        log.info("Attempting to store {} raw transactions associated with block hash {}"
                 .format(len(raw_transactions), block_hash))
        tx_rows = [{'hash': Hasher.hash(raw_tx), 'data': encode_tx(raw_tx), 'block_hash': block_hash}
                   for raw_tx in raw_transactions]

        res = db.tables.transactions.insert(tx_rows).run(db.ex)
        if res:
            log.info("Successfully inserted {} transactions".format(res['row_count']))
        else:
            log.error("Error inserting raw transactions! Got None from insert query. Result={}".format(res))