def setUp(self):
    """Build a block store and a receipt store populated from a test chain.

    Every transaction in the generated chain gets one receipt whose single
    key/value pair records the block id it was committed in.
    """
    self.block_store = BlockStore(DictDatabase())
    self.receipt_store = TransactionReceiptStore(DictDatabase())
    self._txn_ids_by_block_id = {}

    for block_id, block_wrapper, txn_ids in create_chain():
        self.block_store[block_id] = block_wrapper
        self._txn_ids_by_block_id[block_id] = txn_ids
        for txn_id in txn_ids:
            self.receipt_store.put(
                txn_id=txn_id,
                txn_receipt=create_receipt(
                    txn_id=txn_id,
                    key_values=[("address", block_id)]))
def test_state_verifier(self):
    """Populate a block store with blocks built against known state roots
    and check that verify_state completes against them without error."""
    block_store = BlockStore(
        DictDatabase(indexes=BlockStore.create_index_configuration()))
    global_state_db = DictDatabase()

    # Roots the populated chain is expected to reproduce, in order.
    precalculated_state_roots = [
        "e35490eac6f77453675c3399da7efe451e791272bbc8cf1b032c75030fb455c3",
        "3a369eb951171895c00ba2ffd04bfa1ef98d6ee651f96a65ae3280cf8d67d5e7",
        "797e70e29915c9129f950b2084ed0e3c09246bd1e6c232571456f51ca85df340",
    ]

    populate_blockstore(block_store, get_signer(), precalculated_state_roots)

    verify_state(global_state_db, block_store, "tcp://eth0:4004", "serial")
def test_iterate_chain_on_empty_block_store(self):
    """Given a block store with no blocks, iterate using predecessor
    iterator and verify that it results in an empty list.
    """
    store = BlockStore(DictDatabase())

    self.assertEqual([], list(store.get_predecessor_iter()))
def test_requires_genesis_fails_if_joins_network_with_file(self):
    """
    In this case, when there is
     - a genesis_batch_file
     - network id
    the validator should produce an assertion error, as it is joining
    a network, and not a genesis node.
    """
    self._with_empty_batch_file()
    self._with_network_name('some_block_chain_id')

    block_store = self.make_block_store()

    # Collaborators that requires_genesis() does not exercise are mocked.
    genesis_ctrl = GenesisController(
        Mock('context_manager'),
        Mock('txn_executor'),
        Mock('completer'),
        block_store,
        StateViewFactory(DictDatabase()),
        self._signer,
        data_dir=self._temp_dir,
        config_dir=self._temp_dir,
        chain_id_manager=ChainIdManager(
            self._temp_dir),
        batch_sender=Mock('batch_sender'))

    # A genesis batch file plus an existing network id is invalid.
    with self.assertRaises(InvalidGenesisStateError):
        genesis_ctrl.requires_genesis()
def compute_state_hashes_wo_scheduler(self):
    """Creates a state hash from the state updates from each txn in a
    valid batch.

    Returns state_hashes (list of str): The merkle roots from state
        changes in 1 or more blocks in the yaml file.
    """
    tree = MerkleDatabase(database=DictDatabase())
    state_hashes = []
    updates = {}
    for batch in self._batches:
        result = self._batch_results[batch.header_signature]
        if not result.is_valid:
            continue
        for txn in batch.transactions:
            _, address_values = self._txn_execution[txn.header_signature]
            # since this is entirely serial, any overwrite
            # of an address is expected and desirable.
            for pair in address_values:
                updates.update(pair)
        # This handles yaml files that have state roots in them
        if result.state_hash is not None:
            root = tree.update(set_items=updates, virtual=False)
            tree.set_merkle_root(merkle_root=root)
            state_hashes.append(root)
    if not state_hashes:
        state_hashes.append(tree.update(set_items=updates))
    return state_hashes
def test_requires_genesis_fails_if_block_exists(self):
    """
    In this case, when there is
     - a genesis_batch_file
     - a chain head id
    the validator should produce an assertion, as it already has a
    genesis block and should not attempt to produce another.
    """
    self._with_empty_batch_file()

    # Seed the store with one block so a chain head already exists.
    block = self._create_block()
    block_store = self.make_block_store({block.header_signature: block})

    # Collaborators that requires_genesis() does not exercise are mocked.
    genesis_ctrl = GenesisController(
        Mock('context_manager'),
        Mock('txn_executor'),
        Mock('completer'),
        block_store,
        StateViewFactory(DictDatabase()),
        self._signer,
        data_dir=self._temp_dir,
        config_dir=self._temp_dir,
        chain_id_manager=ChainIdManager(
            self._temp_dir),
        batch_sender=Mock('batch_sender'))

    with self.assertRaises(InvalidGenesisStateError):
        genesis_ctrl.requires_genesis()
def test_get_receipts(self):
    """Tests that the TransactionReceiptGetRequestHandler will return
    a response with the receipt for the transaction requested.
    """
    store = TransactionReceiptStore(DictDatabase())
    stored_receipt = TransactionReceipt(
        data=[TransactionReceipt.Data(
            data_type="dead", data="beef".encode())])
    store.put("deadbeef", stored_receipt)

    handler = ClientReceiptGetRequestHandler(store)

    # A known transaction id returns its stored receipt.
    response = handler.handle(
        'test_conn_id',
        ClientReceiptGetRequest(
            transaction_ids=['deadbeef']).SerializeToString())
    self.assertEqual(HandlerStatus.RETURN, response.status)
    self.assertEqual(ClientReceiptGetResponse.OK,
                     response.message_out.status)
    self.assertEqual([stored_receipt],
                     list(response.message_out.receipts))

    # An unknown transaction id returns NO_RESOURCE.
    response = handler.handle(
        'test_conn_id',
        ClientReceiptGetRequest(
            transaction_ids=['unknown']).SerializeToString())
    self.assertEqual(HandlerStatus.RETURN, response.status)
    self.assertEqual(ClientReceiptGetResponse.NO_RESOURCE,
                     response.message_out.status)
def setUp(self):
    """Wire a Completer to an indexed block store and a mock gossip
    layer, create a signer, and prepare scratch lists for generated
    blocks and batches."""
    self.block_store = BlockStore(
        DictDatabase(indexes=BlockStore.create_index_configuration()))
    self.block_manager = BlockManager()
    self.block_manager.add_store("commit_store", self.block_store)
    self.gossip = MockGossip()
    # The completer resolves commit/lookup queries through the block
    # store's accessor methods.
    self.completer = Completer(
        block_manager=self.block_manager,
        transaction_committed=self.block_store.has_transaction,
        get_committed_batch_by_id=self.block_store.get_batch,
        get_committed_batch_by_txn_id=(
            self.block_store.get_batch_by_transaction),
        get_chain_head=lambda: self.block_store.chain_head,
        gossip=self.gossip)
    # Route completed blocks/batches back into this test's callbacks.
    self.completer.set_on_block_received(self._on_block_received)
    self.completer.set_on_batch_received(self._on_batch_received)
    self._has_block_value = True

    context = create_context('secp256k1')
    private_key = context.new_random_private_key()
    crypto_factory = CryptoFactory(context)
    self.signer = crypto_factory.new_signer(private_key)

    self.blocks = []
    self.batches = []
def test_publish_deltas_no_state_changes(self):
    """Given a block transition, where no state changes happened (e.g. it
    only had transactions which did not change state), the
    StateDeltaProcessor should still publish an event with the block
    change information.
    """
    mock_service = Mock()
    block_tree_manager = BlockTreeManager()

    database = DictDatabase()
    delta_store = StateDeltaStore(database)

    delta_processor = StateDeltaProcessor(
        service=mock_service,
        state_delta_store=delta_store,
        block_store=block_tree_manager.block_store)

    # Subscribe from the current chain head with an address-prefix filter;
    # no deltas are ever saved for the next block's state root.
    delta_processor.add_subscriber(
        'subscriber_conn_id',
        [block_tree_manager.chain_head.identifier],
        ['000000'])

    next_block = block_tree_manager.generate_block()
    delta_processor.publish_deltas(next_block)

    # The event must still be sent, with an empty state_changes list.
    mock_service.send.assert_called_with(
        validator_pb2.Message.STATE_DELTA_EVENT,
        StateDeltaEvent(
            block_id=next_block.identifier,
            block_num=next_block.header.block_num,
            state_root_hash=next_block.header.state_root_hash,
            previous_block_id=next_block.header.previous_block_id,
            state_changes=[]).SerializeToString(),
        connection_id='subscriber_conn_id')
def test_state_verifier(self):
    """Populate an indexed block store with blocks built against known
    state roots and run verify_state over an LMDB-backed state database."""
    blockstore = BlockStore(DictDatabase(
        indexes=BlockStore.create_index_configuration()))
    global_state_db = NativeLmdbDatabase(
        os.path.join(self._temp_dir, 'test_state_verifier.lmdb'))
    # Roots the populated chain is expected to reproduce, in order.
    precalculated_state_roots = [
        "e35490eac6f77453675c3399da7efe451e791272bbc8cf1b032c75030fb455c3",
        "3a369eb951171895c00ba2ffd04bfa1ef98d6ee651f96a65ae3280cf8d67d5e7",
        "797e70e29915c9129f950b2084ed0e3c09246bd1e6c232571456f51ca85df340",
    ]

    signer = get_signer()
    populate_blockstore(blockstore, signer, precalculated_state_roots)

    verify_state(
        global_state_db,
        blockstore,
        "tcp://eth0:4004",
        "serial")

    # There is a bug in the shutdown code for some component this depends
    # on, which causes it to occassionally hang during shutdown. Just kill
    # the process for now.
    # pylint: disable=protected-access
    os._exit(0)
def make_db_and_store(size=3, start='a'):
    """ Creates and returns three related objects for testing:
        * database - dict database with evolving state
        * store - blocks with root hashes corresponding to that state
        * roots - list of root hashes used in order
    With defaults, the values at the three roots look like this:
        * 0 - {'a': b'1'}
        * 1 - {'a': b'2', 'b': b'4'}
        * 2 - {'a': b'3', 'b': b'5', 'c': b'7'}
    """
    database = DictDatabase()
    store = MockBlockStore(size=0)
    merkle = MerkleDatabase(database)
    roots = []
    data = {}

    for block_num in range(size):
        # Bump every existing value by one...
        data = {key: str(int(val) + 1).encode()
                for key, val in data.items()}
        # ...then introduce a fresh key for this block.
        data[_increment_key(start, block_num)] = \
            str(block_num * size + 1).encode()

        root = merkle.update(data, virtual=False)
        roots.append(root)
        store.add_block(str(block_num), root)

    return database, store, roots
def __init__(self, with_genesis=True):
    """Assemble mock journal components (block store/cache, mock state
    view with the consensus setting, signing keys) and a BlockPublisher,
    optionally seeding the chain with a genesis block."""
    self.block_sender = MockBlockSender()
    self.batch_sender = MockBatchSender()
    self.block_store = BlockStore(DictDatabase())
    self.block_cache = BlockCache(self.block_store)
    self.state_db = {}

    # add the mock reference to the consensus
    consensus_setting_addr = ConfigView.setting_address(
        'sawtooth.consensus.algorithm')
    self.state_db[consensus_setting_addr] = _setting_entry(
        'sawtooth.consensus.algorithm', 'test_journal.mock_consensus')

    self.state_view_factory = MockStateViewFactory(self.state_db)

    self.signing_key = signing.generate_privkey()
    self.public_key = signing.generate_pubkey(self.signing_key)

    self.identity_signing_key = signing.generate_privkey()

    chain_head = None
    if with_genesis:
        self.genesis_block = self.generate_genesis_block()
        self.set_chain_head(self.genesis_block)
        chain_head = self.genesis_block

    # NOTE(review): batch_sender is passed self.block_sender, not
    # self.batch_sender — confirm whether this is intentional.
    self.block_publisher = BlockPublisher(
        transaction_executor=MockTransactionExecutor(),
        block_cache=self.block_cache,
        state_view_factory=self.state_view_factory,
        block_sender=self.block_sender,
        batch_sender=self.block_sender,
        squash_handler=None,
        chain_head=chain_head,
        identity_signing_key=self.identity_signing_key,
        data_dir=None,
        config_dir=None)
def make_db_and_store(size=3):
    """Build a dict database, a mock block store, and the list of merkle
    roots produced by committing ``size`` successive states.

    Keys are zero-padded hex strings starting with '1'; the value stored
    under key index k at root i is ``i + 2*k``.
    """
    database = DictDatabase()
    store = MockBlockStore(size=0)
    merkle = MerkleDatabase(database)
    roots = []

    # Create all the keys that will be used up front.
    keys = [format(i, 'x').zfill(70) for i in range(1, size + 1)]

    for i in range(1, size + 1):
        # State at root i holds the first i keys, each with a value
        # derived from the root number and the key's index.
        data = {
            keys[key_idx]: str(i + (2 * key_idx)).encode()
            for key_idx in range(i)
        }

        root = merkle.update(data, virtual=False)
        roots.append(root)
        store.add_block(str(i), root)

    return database, store, roots
def __init__(self):
    """Assemble mock journal components (block store/cache, mock state
    view with the consensus setting, signing keys), create a genesis
    block as the chain head, and build a BlockPublisher around them."""
    self.block_sender = MockBlockSender()
    self.block_store = BlockStore(DictDatabase())
    self.block_cache = BlockCache(self.block_store)
    self.state_db = {}

    # add the mock reference to the consensus
    self.state_db[_setting_address('sawtooth.consensus.algorithm')] = \
        _setting_entry('sawtooth.consensus.algorithm',
                       'test_journal.mock_consensus')

    self.state_view_factory = MockStateViewFactory(self.state_db)

    self.signing_key = signing.generate_privkey()
    self.public_key = signing.encode_pubkey(
        signing.generate_pubkey(self.signing_key), "hex")

    self.identity_signing_key = signing.generate_privkey()
    self.genesis_block = self._generate_genesis_block()
    self.set_chain_head(self.genesis_block)

    self.block_publisher = BlockPublisher(
        transaction_executor=MockTransactionExecutor(),
        block_cache=self.block_cache,
        state_view_factory=self.state_view_factory,
        block_sender=self.block_sender,
        squash_handler=None,
        chain_head=self.genesis_block,
        identity_signing_key=self.identity_signing_key)
def test_state_store_get_and_set(self):
    """Tests that we correctly get and set state changes to a
    StateDeltaStore.

    This tests sets a list of state change values and then gets them
    back, ensuring that the data is the same.
    """
    delta_store = StateDeltaStore(DictDatabase())

    changes = [
        StateChange(address='a100000' + str(i),
                    value=str(i).encode(),
                    type=StateChange.SET)
        for i in range(10)
    ]
    delta_store.save_state_deltas('my_state_root_hash', changes)

    # The repeated field is only list-like; copy it into a real list
    # so it can be compared against the original.
    stored_changes = list(
        delta_store.get_state_deltas('my_state_root_hash'))

    self.assertEqual(changes, stored_changes)
def test_raise_key_error_on_missing_receipt(self):
    """Tests that we correctly raise key error on a missing receipt
    """
    store = TransactionReceiptStore(DictDatabase())

    with self.assertRaises(KeyError):
        store.get('unknown')
def test_raise_key_error_on_missing_root_hash(self):
    """Tests that we correctly raise key error on a missing hash
    """
    store = StateDeltaStore(DictDatabase())

    with self.assertRaises(KeyError):
        store.get_state_deltas('unknown_state_root_hash')
def create_chain_commit_state(self,
                              blocks,
                              uncommitted_blocks=None,
                              chain_head=None):
    """Build a ChainCommitState over a block store committed with
    ``blocks``; ``uncommitted_blocks`` defaults to an empty list."""
    block_store = BlockStore(DictDatabase())
    block_store.update_chain(blocks)
    if chain_head is None:
        chain_head = block_store.chain_head.identifier
    if uncommitted_blocks is None:
        uncommitted_blocks = []
    # NOTE(review): chain_head is resolved above but never passed to
    # ChainCommitState — confirm whether it should be.
    return ChainCommitState(block_store, uncommitted_blocks)
def setUp(self):
    """Wire a Completer to an indexed block store and mock gossip,
    hook the test's callbacks into its private receive hooks, and
    create a signing key pair plus scratch lists for blocks/batches."""
    self.block_store = BlockStore(DictDatabase(
        indexes=BlockStore.create_index_configuration()))
    self.gossip = MockGossip()
    self.completer = Completer(self.block_store, self.gossip)
    # Tests reach into the completer's private callbacks directly.
    self.completer._on_block_received = self._on_block_received
    self.completer._on_batch_received = self._on_batch_received
    self.private_key = signing.generate_private_key()
    self.public_key = signing.generate_public_key(self.private_key)
    self.blocks = []
    self.batches = []
def test_add_subscriber(self):
    """Test adding a subscriber, who has no known blocks.

    This scenerio is valid for subscribers who have never connected and
    would need to receive all deltas since the genesis block.

    On registration, the subscriber should receive one event, comprised
    of the state changes for the genesis block.
    """
    mock_service = Mock()
    block_tree_manager = BlockTreeManager()

    delta_store = StateDeltaStore(DictDatabase())

    delta_processor = StateDeltaProcessor(
        service=mock_service,
        state_delta_store=delta_store,
        block_store=block_tree_manager.block_store)

    # Save two deltas for the genesis state root; only the one matching
    # the subscriber's 'deadbeef' prefix filter should be delivered.
    delta_store.save_state_deltas(
        block_tree_manager.chain_head.state_root_hash,
        [StateChange(address='deadbeef0000000',
                     value='my_genesis_value'.encode(),
                     type=StateChange.SET),
         StateChange(address='a14ea01',
                     value='some other state value'.encode(),
                     type=StateChange.SET)])

    # Empty known-block list: subscriber catches up from genesis.
    delta_processor.add_subscriber(
        'test_conn_id',
        [],
        ['deadbeef'])

    self.assertEqual(['test_conn_id'], delta_processor.subscriber_ids)

    # test that it catches up, and receives the events from the chain head.
    # In this case, it should just be once, as the chain head is the
    # genesis block
    chain_head = block_tree_manager.chain_head
    mock_service.send.assert_called_with(
        validator_pb2.Message.STATE_DELTA_EVENT,
        StateDeltaEvent(
            block_id=chain_head.identifier,
            block_num=chain_head.block_num,
            state_root_hash=chain_head.state_root_hash,
            previous_block_id=chain_head.previous_block_id,
            state_changes=[StateChange(address='deadbeef0000000',
                                       value='my_genesis_value'.encode(),
                                       type=StateChange.SET)]
        ).SerializeToString(),
        connection_id='test_conn_id')
def setUp(self):
    """Create a merkle state containing three settings entries and keep
    the resulting root hash for building ConfigViews in the tests."""
    database = DictDatabase()
    state_view_factory = StateViewFactory(database)
    self._config_view_factory = ConfigViewFactory(state_view_factory)

    merkle_db = MerkleDatabase(database)
    # virtual=False persists the entries so views built later can read
    # them back by this root hash.
    self._current_root_hash = merkle_db.update({
        TestConfigView._address('my.setting'):
            TestConfigView._setting_entry('my.setting', '10'),
        TestConfigView._address('my.setting.list'):
            TestConfigView._setting_entry('my.setting.list', '10,11,12'),
        TestConfigView._address('my.other.list'):
            TestConfigView._setting_entry('my.other.list', '13;14;15')
    }, virtual=False)
def test_publish_block(self): """ Test that the Journal will produce blocks and consume those blocks to extend the chain. :return: """ # construction and wire the journal to the # gossip layer. btm = BlockTreeManager() journal = None try: journal = Journal( block_store=btm.block_store, block_cache=btm.block_cache, state_view_factory=StateViewFactory(DictDatabase()), block_sender=self.block_sender, batch_sender=self.batch_sender, transaction_executor=self.txn_executor, squash_handler=None, identity_signing_key=btm.identity_signing_key, chain_id_manager=None, state_delta_processor=self.state_delta_processor, data_dir=None, config_dir=None ) self.gossip.on_batch_received = journal.on_batch_received self.gossip.on_block_received = journal.on_block_received journal.start() # feed it a batch batch = Batch() journal.on_batch_received(batch) wait_until(lambda: self.block_sender.new_block is not None, 2) self.assertTrue(self.block_sender.new_block is not None) block = BlockWrapper(self.block_sender.new_block) journal.on_block_received(block) # wait for the chain_head to be updated. wait_until(lambda: btm.chain_head.identifier == block.identifier, 2) self.assertTrue(btm.chain_head.identifier == block.identifier) finally: if journal is not None: journal.stop()
def __init__(self, with_genesis=True):
    """Assemble mock journal components (indexed block store, block
    manager, mock state view with the consensus setting, secp256k1
    signers) and a BlockPublisher, optionally committing a genesis
    block as the chain head."""
    self.block_sender = MockBlockSender()
    self.batch_sender = MockBatchSender()
    self.block_store = BlockStore(
        DictDatabase(indexes=BlockStore.create_index_configuration()))
    self.block_cache = BlockCache(self.block_store)
    self.state_db = {}
    self.block_manager = BlockManager()
    self.block_manager.add_store("commit_store", self.block_store)

    # add the mock reference to the consensus
    consensus_setting_addr = SettingsView.setting_address(
        'sawtooth.consensus.algorithm')
    self.state_db[consensus_setting_addr] = _setting_entry(
        'sawtooth.consensus.algorithm', 'test_journal.mock_consensus')

    self.state_view_factory = MockStateViewFactory(self.state_db)
    context = create_context('secp256k1')
    private_key = context.new_random_private_key()
    crypto_factory = CryptoFactory(context)
    self.signer = crypto_factory.new_signer(private_key)

    identity_private_key = context.new_random_private_key()
    self.identity_signer = crypto_factory.new_signer(identity_private_key)
    chain_head = None
    if with_genesis:
        self.genesis_block = self.generate_genesis_block()
        chain_head = self.genesis_block
        # Commit the genesis block through the block manager.
        self.block_manager.put([chain_head.block])
        self.block_manager.persist(chain_head.block.header_signature,
                                   "commit_store")

    # NOTE(review): batch_sender is passed self.block_sender, not
    # self.batch_sender — confirm whether this is intentional.
    self.block_publisher = BlockPublisher(
        block_manager=self.block_manager,
        transaction_executor=MockTransactionExecutor(),
        transaction_committed=self.block_store.has_transaction,
        batch_committed=self.block_store.has_batch,
        state_view_factory=self.state_view_factory,
        settings_cache=SettingsCache(
            SettingsViewFactory(self.state_view_factory),
        ),
        block_sender=self.block_sender,
        batch_sender=self.block_sender,
        chain_head=chain_head.block,
        identity_signer=self.identity_signer,
        data_dir=None,
        config_dir=None,
        permission_verifier=MockPermissionVerifier(),
        batch_observers=[])
def test_iterate_chain(self):
    """Given a block store, create an predecessor iterator.

    1. Create a chain of length 5.
    2. Iterate the chain using the get_predecessor_iter from the chain
       head
    3. Verify that the block ids match the chain, in reverse order
    """
    store = BlockStore(DictDatabase())
    store.update_chain(self._create_chain(5))

    self.assertEqual(
        ['abcd4', 'abcd3', 'abcd2', 'abcd1', 'abcd0'],
        [block.identifier for block in store.get_predecessor_iter()])
def setUp(self):
    """Create an LMDB-backed block store and a dict-backed receipt store
    populated from a test chain; every transaction gets one receipt
    recording the block it was committed in."""
    self.dir = tempfile.mkdtemp()
    self.block_db = NativeLmdbDatabase(
        os.path.join(self.dir, 'block.lmdb'),
        BlockStore.create_index_configuration())
    self.block_store = BlockStore(self.block_db)
    self.receipt_store = TransactionReceiptStore(DictDatabase())
    self._txn_ids_by_block_id = {}

    for block_id, block_wrapper, txn_ids in create_chain():
        self.block_store.put_blocks([block_wrapper.block])
        self._txn_ids_by_block_id[block_id] = txn_ids
        for txn_id in txn_ids:
            self.receipt_store.put(
                txn_id=txn_id,
                txn_receipt=create_receipt(
                    txn_id=txn_id,
                    key_values=[("address", block_id)]))
def setUp(self):
    """Wire a Completer to an indexed block store and mock gossip, hook
    the test's callbacks into its private receive hooks, and create a
    secp256k1 signer plus scratch lists for blocks/batches."""
    self.block_store = BlockStore(
        DictDatabase(indexes=BlockStore.create_index_configuration()))
    self.gossip = MockGossip()
    self.completer = Completer(self.block_store, self.gossip)
    # Tests reach into the completer's private callbacks directly.
    self.completer._on_block_received = self._on_block_received
    self.completer._on_batch_received = self._on_batch_received

    context = create_context('secp256k1')
    private_key = context.new_random_private_key()
    crypto_factory = CryptoFactory(context)
    self.signer = crypto_factory.new_signer(private_key)

    self.blocks = []
    self.batches = []
def test_does_not_require_genesis_block_exists(self):
    """When the block store already contains a chain head,
    requires_genesis() should report False (no genesis work needed)."""
    block_store = self.make_block_store({'chain_head_id': 'some_other_id'})

    # Collaborators that requires_genesis() does not exercise are mocked.
    genesis_ctrl = GenesisController(
        Mock('context_manager'),
        Mock('txn_executor'),
        Mock('completer'),
        block_store,
        StateViewFactory(DictDatabase()),
        self._identity_key,
        data_dir=self._temp_dir,
        chain_id_manager=ChainIdManager(
            self._temp_dir),
        batch_sender=Mock('batch_sender'))

    self.assertEqual(False, genesis_ctrl.requires_genesis())
def test_requires_genesis(self):
    """With a genesis batch file present and an empty block store,
    requires_genesis() should report True."""
    self._with_empty_batch_file()
    # Collaborators that requires_genesis() does not exercise are mocked.
    genesis_ctrl = GenesisController(
        Mock('context_manager'),
        Mock('txn_executor'),
        Mock('completer'),
        self.make_block_store(),  # Empty block store
        StateViewFactory(DictDatabase()),
        self._identity_key,
        data_dir=self._temp_dir,
        chain_id_manager=ChainIdManager(self._temp_dir)
    )

    self.assertEqual(True, genesis_ctrl.requires_genesis())
def test_is_valid_subscription_no_known_blocks(self):
    """Test that a check for a valid subscription with no known block ids
    returns True.
    """
    processor = StateDeltaProcessor(
        service=Mock(),
        state_delta_store=StateDeltaStore(DictDatabase()),
        block_store=BlockTreeManager().block_store)

    self.assertTrue(processor.is_valid_subscription([]))
def create_chain_commit_state(
        self,
        committed_blocks,
        uncommitted_blocks,
        head_id,
):
    """Build a ChainCommitState headed at ``head_id``: committed blocks
    go into an indexed block store, uncommitted blocks only into the
    block cache."""
    block_store = BlockStore(
        DictDatabase(indexes=BlockStore.create_index_configuration()))
    block_store.update_chain(committed_blocks)
    block_cache = BlockCache(block_store=block_store)
    for block in uncommitted_blocks:
        block_cache[block.header_signature] = block
    return ChainCommitState(head_id, block_cache, block_store)