def setUp(self):
    """Wire a ChainController over mocked network/executor collaborators,
    then extend the genesis root by five blocks so every test starts from
    a non-trivial chain head (stored in ``self.init_head``).
    """
    self.block_tree_manager = BlockTreeManager()
    self.gossip = MockNetwork()
    self.executor = SynchronousExecutor()
    self.txn_executor = MockTransactionExecutor()
    self.block_sender = MockBlockSender()
    self.chain_id_manager = MockChainIdManager()

    def chain_updated(head, committed_batches=None,
                      uncommitted_batches=None):
        # No-op chain-head observer; tests assert on the controller directly.
        pass

    self.chain_ctrl = ChainController(
        block_cache=self.block_tree_manager.block_cache,
        state_view_factory=MockStateViewFactory(
            self.block_tree_manager.state_db),
        block_sender=self.block_sender,
        executor=self.executor,
        transaction_executor=MockTransactionExecutor(),
        on_chain_updated=chain_updated,
        squash_handler=None,
        chain_id_manager=self.chain_id_manager,
        identity_signing_key=self.block_tree_manager.identity_signing_key,
        data_dir=None)

    init_root = self.chain_ctrl.chain_head
    self.assert_is_chain_head(init_root)

    # create a chain of length 5 extending the root
    _, head = self.generate_chain(init_root, 5)
    self.receive_and_process_blocks(head)
    self.assert_is_chain_head(head)

    self.init_head = head
def setUp(self):
    """Create the fixtures shared by the block-validation tests: a block
    tree with its genesis head, a mock state view factory, and the test
    class's validation-result handler.
    """
    self.block_tree_manager = BlockTreeManager()
    self.root = self.block_tree_manager.chain_head
    self.state_view_factory = MockStateViewFactory()
    self.block_validation_handler = self.BlockValidationHandler()
def test_publish_deltas_no_state_changes(self):
    """Given a block transition, where no state changes happened (e.g. it
    only had transactions which did not change state), the
    StateDeltaProcessor should still publish an event with the block
    change information.
    """
    mock_service = Mock()
    block_tree_manager = BlockTreeManager()
    database = DictDatabase()
    delta_store = StateDeltaStore(database)

    delta_processor = StateDeltaProcessor(
        service=mock_service,
        state_delta_store=delta_store,
        block_store=block_tree_manager.block_store)

    # subscriber starts at the current chain head, filtering on '000000'
    delta_processor.add_subscriber(
        'subscriber_conn_id',
        [block_tree_manager.chain_head.identifier],
        ['000000'])

    next_block = block_tree_manager.generate_block()
    delta_processor.publish_deltas(next_block)

    # the event is still sent, just with an empty state_changes list
    mock_service.send.assert_called_with(
        validator_pb2.Message.STATE_DELTA_EVENT,
        StateDeltaEvent(
            block_id=next_block.identifier,
            block_num=next_block.header.block_num,
            state_root_hash=next_block.header.state_root_hash,
            previous_block_id=next_block.header.previous_block_id,
            state_changes=[]).SerializeToString(),
        connection_id='subscriber_conn_id')
def setUp(self):
    """Build a BlockPublisher over mocked senders/executor, rooted at the
    block tree's genesis head, plus a default set of batches for tests.
    """
    self.block_tree_manager = BlockTreeManager()
    self.block_sender = MockBlockSender()
    self.batch_sender = MockBatchSender()
    self.state_view_factory = MockStateViewFactory({})

    self.publisher = BlockPublisher(
        transaction_executor=MockTransactionExecutor(),
        block_cache=self.block_tree_manager.block_cache,
        state_view_factory=self.state_view_factory,
        block_sender=self.block_sender,
        batch_sender=self.batch_sender,
        squash_handler=None,
        chain_head=self.block_tree_manager.chain_head,
        identity_signing_key=self.block_tree_manager.identity_signing_key,
        data_dir=None,
        config_dir=None)

    self.init_chain_head = self.block_tree_manager.chain_head
    self.result_block = None

    # A list of batches is created at the beginning of each test.
    # The test assertions and the publisher function wrappers
    # take these batches as a default argument.
    self.batch_count = 8
    self.batches = self.make_batches()
def setUp(self, mock_batch_injector_factory):
    """Build a BlockPublisher with a patched batch-injector factory
    (injected by the test decorator) that creates no injectors, plus a
    default set of batches for tests.
    """
    # the patched factory yields no injectors, so publishing is exercised
    # without injected batches
    mock_batch_injector_factory.create_injectors.return_value = []

    self.block_tree_manager = BlockTreeManager()
    self.block_sender = MockBlockSender()
    self.batch_sender = MockBatchSender()
    self.state_view_factory = MockStateViewFactory({})
    self.permission_verifier = MockPermissionVerifier()

    self.publisher = BlockPublisher(
        block_store=self.block_tree_manager.block_store,
        block_manager=self.block_tree_manager.block_manager,
        transaction_executor=MockTransactionExecutor(),
        state_view_factory=self.state_view_factory,
        block_sender=self.block_sender,
        batch_sender=self.batch_sender,
        identity_signer=self.block_tree_manager.identity_signer,
        data_dir=None,
        config_dir=None,
        batch_observers=[],
        permission_verifier=self.permission_verifier,
        batch_injector_factory=mock_batch_injector_factory)

    self.init_chain_head = self.block_tree_manager.chain_head
    self.result_block = None

    # A list of batches is created at the beginning of each test.
    # The test assertions and the publisher function wrappers
    # take these batches as a default argument.
    self.batch_count = 8
    self.batches = self.make_batches()
def __init__(self):
    """Build a BlockPublisher (settings-cache variant) over mocked
    senders/executor, plus a default set of batches for tests.
    """
    self.block_tree_manager = BlockTreeManager()
    self.block_sender = MockBlockSender()
    self.batch_sender = MockBatchSender()
    self.state_view_factory = MockStateViewFactory({})
    self.permission_verifier = MockPermissionVerifier()

    self.publisher = BlockPublisher(
        transaction_executor=MockTransactionExecutor(),
        block_cache=self.block_tree_manager.block_cache,
        state_view_factory=self.state_view_factory,
        settings_cache=SettingsCache(
            SettingsViewFactory(
                self.block_tree_manager.state_view_factory),
        ),
        block_sender=self.block_sender,
        batch_sender=self.batch_sender,
        squash_handler=None,
        chain_head=self.block_tree_manager.chain_head,
        identity_signer=self.block_tree_manager.identity_signer,
        data_dir=None,
        config_dir=None,
        check_publish_block_frequency=0.1,
        batch_observers=[],
        permission_verifier=self.permission_verifier)

    self.init_chain_head = self.block_tree_manager.chain_head
    self.result_block = None

    # A list of batches is created at the beginning of each test.
    # The test assertions and the publisher function wrappers
    # take these batches as a default argument.
    self.batch_count = 8
    self.batches = self.make_batches()
def setUp(self):
    """Wire a ChainController over a block tree created WITHOUT a genesis
    block, so tests start with an empty chain (chain_head is None).
    """
    self.block_tree_manager = BlockTreeManager(with_genesis=False)
    self.gossip = MockNetwork()
    self.executor = SynchronousExecutor()
    self.txn_executor = MockTransactionExecutor()
    self.block_sender = MockBlockSender()
    self.chain_id_manager = MockChainIdManager()
    self.state_delta_processor = MockStateDeltaProcessor()
    self.chain_head_lock = RLock()

    def chain_updated(head, committed_batches=None,
                      uncommitted_batches=None):
        # No-op chain-head observer; tests assert on the controller directly.
        pass

    self.chain_ctrl = ChainController(
        block_cache=self.block_tree_manager.block_cache,
        state_view_factory=MockStateViewFactory(
            self.block_tree_manager.state_db),
        block_sender=self.block_sender,
        executor=self.executor,
        transaction_executor=MockTransactionExecutor(),
        chain_head_lock=self.chain_head_lock,
        on_chain_updated=chain_updated,
        squash_handler=None,
        chain_id_manager=self.chain_id_manager,
        state_delta_processor=self.state_delta_processor,
        identity_signing_key=self.block_tree_manager.identity_signing_key,
        data_dir=None,
        config_dir=None)

    # with no genesis, the controller must start without a chain head
    self.assertIsNone(self.chain_ctrl.chain_head)
def __init__(self):
    """Collect the fixtures shared by the block-validation tests: a block
    tree with its genesis head, mock state-view and permission-verifier
    collaborators, and the test class's validation-result handler.
    """
    self.block_tree_manager = BlockTreeManager()
    self.root = self.block_tree_manager.chain_head
    self.state_view_factory = MockStateViewFactory()
    self.permission_verifier = MockPermissionVerifier()
    self.block_validation_handler = self.BlockValidationHandler()
class TestChainController(unittest.TestCase):
    """ChainController tests using a pluggable consensus module."""

    def setUp(self):
        # Wire the controller over mocked network/executor collaborators.
        self.blocks = BlockTreeManager()
        self.gossip = MockNetwork()
        self.executor = SynchronousExecutor()
        self.txn_executor = MockTransactionExecutor()
        self.block_sender = MockBlockSender()
        self.state_view_factory = MockStateViewFactory()

        def chain_updated(head, committed_batches=None,
                          uncommitted_batches=None):
            # No-op chain-head observer.
            pass

        self.chain_ctrl = ChainController(
            consensus_module=mock_consensus,
            block_cache=self.blocks.block_cache,
            state_view_factory=self.state_view_factory,
            block_sender=self.block_sender,
            executor=self.executor,
            transaction_executor=MockTransactionExecutor(),
            on_chain_updated=chain_updated,
            squash_handler=None)

    def test_simple_case(self):
        # TEST Run the simple case
        block_1 = self.blocks.generate_block(self.blocks.chain_head)
        self.chain_ctrl.on_block_received(block_1)
        self.executor.process_all()
        # the new block extending the head becomes the chain head
        assert (self.chain_ctrl.chain_head.block.header_signature ==
                block_1.header_signature)

    def test_alternate_genesis(self):
        # TEST Run generate and alternate genesis block
        head = self.chain_ctrl.chain_head
        for b in self.blocks.generate_chain(None, 5,
                                            {"add_to_cache": True}):
            self.chain_ctrl.on_block_received(b)
        self.executor.process_all()
        # an alternate genesis chain must not displace the current head
        assert (self.chain_ctrl.chain_head.block.header_signature ==
                head.block.header_signature)

    def test_bad_block_signature(self):
        # TEST Bad block extending current chain
        # Bad due to signature
        head = self.blocks.chain_head
        block_bad = self.blocks.generate_block(
            self.blocks.chain_head.block,
            invalid_signature=True)
        self.chain_ctrl.on_block_received(block_bad)
        # the invalid block must be dropped; head is unchanged
        assert (self.chain_ctrl.chain_head.block.header_signature ==
                head.block.header_signature)

    def test_bad_block_consensus(self):
        # Bad due to consensus
        pass

    def test_bad_block_transaction(self):
        # Bad due to transaction
        pass
class TestChainController(unittest.TestCase):
    """ChainController tests (block_store / TestModeVerifier variant)."""

    def setUp(self):
        # Wire the controller over mocked network/executor collaborators.
        self.blocks = BlockTreeManager()
        self.gossip = MockNetwork()
        self.executor = SynchronousExecutor()
        self.txn_executor = MockTransactionExecutor()
        self.block_sender = MockBlockSender()

        def chain_updated(head):
            # No-op chain-head observer.
            pass

        self.chain_ctrl = ChainController(
            consensus=TestModeVerifier(),
            block_store=self.blocks.block_store,
            block_sender=self.block_sender,
            executor=self.executor,
            transaction_executor=MockTransactionExecutor(),
            on_chain_updated=chain_updated,
            squash_handler=None)

    def test_simple_case(self):
        # TEST Run the simple case
        block_1 = self.blocks.generate_block(self.blocks.chain_head)
        self.chain_ctrl.on_block_received(block_1.get_block())
        self.executor.process_all()
        # the new block extending the head becomes the chain head
        assert (self.chain_ctrl.chain_head.block.header_signature ==
                block_1.header_signature)

    def test_alternate_genesis(self):
        # TEST Run generate and alternate genesis block
        head = self.chain_ctrl.chain_head
        other_genesis = self.blocks.generate_block(add_to_store=True)
        for b in self.blocks.generate_chain(other_genesis, 5):
            self.chain_ctrl.on_block_received(b.get_block())
        self.executor.process_all()
        # an alternate genesis chain must not displace the current head
        assert (self.chain_ctrl.chain_head.block.header_signature ==
                head.block.header_signature)

    def test_bad_block_signature(self):
        # TEST Bad block extending current chain
        # Bad due to signature
        head = self.blocks.chain_head
        block_bad = self.blocks.generate_block(
            self.blocks.chain_head.block,
            invalid_signature=True)
        self.chain_ctrl.on_block_received(block_bad.get_block())
        # the invalid block must be dropped; head is unchanged
        assert (self.chain_ctrl.chain_head.block.header_signature ==
                head.block.header_signature)

    def test_bad_block_consensus(self):
        # Bad due to consensus
        pass

    def test_bad_block_transaction(self):
        # Bad due to transaction
        pass
class TestChainController(unittest.TestCase):
    """ChainController tests (block_cache / TestModeVerifier variant)."""

    def setUp(self):
        # Wire the controller over mocked network/executor collaborators.
        self.blocks = BlockTreeManager()
        self.gossip = MockNetwork()
        self.executor = SynchronousExecutor()
        self.txn_executor = MockTransactionExecutor()
        self.block_sender = MockBlockSender()

        def chain_updated(head):
            # No-op chain-head observer.
            pass

        self.chain_ctrl = ChainController(
            consensus=TestModeVerifier(),
            block_cache=self.blocks.block_cache,
            block_sender=self.block_sender,
            executor=self.executor,
            transaction_executor=MockTransactionExecutor(),
            on_chain_updated=chain_updated,
            squash_handler=None)

    def test_simple_case(self):
        # TEST Run the simple case
        block_1 = self.blocks.generate_block(self.blocks.chain_head)
        self.chain_ctrl.on_block_received(block_1)
        self.executor.process_all()
        # the new block extending the head becomes the chain head
        assert (self.chain_ctrl.chain_head.block.header_signature ==
                block_1.header_signature)

    def test_alternate_genesis(self):
        # TEST Run generate and alternate genesis block
        head = self.chain_ctrl.chain_head
        other_genesis = self.blocks.generate_block(add_to_store=True)
        for b in self.blocks.generate_chain(other_genesis, 5):
            self.chain_ctrl.on_block_received(b)
        self.executor.process_all()
        # an alternate genesis chain must not displace the current head
        assert (self.chain_ctrl.chain_head.block.header_signature ==
                head.block.header_signature)

    def test_bad_block_signature(self):
        # TEST Bad block extending current chain
        # Bad due to signature
        head = self.blocks.chain_head
        block_bad = self.blocks.generate_block(
            self.blocks.chain_head.block,
            invalid_signature=True)
        self.chain_ctrl.on_block_received(block_bad)
        # the invalid block must be dropped; head is unchanged
        assert (self.chain_ctrl.chain_head.block.header_signature ==
                head.block.header_signature)

    def test_bad_block_consensus(self):
        # Bad due to consensus
        pass

    def test_bad_block_transaction(self):
        # Bad due to transaction
        pass
def __init__(self):
    """Wire a BlockValidator plus ChainController over mocked
    collaborators, then extend the genesis root by five blocks so tests
    start from a non-trivial chain head (stored in ``self.init_head``).
    """
    self.block_tree_manager = BlockTreeManager()
    self.gossip = MockNetwork()
    self.txn_executor = MockTransactionExecutor()
    self.block_sender = MockBlockSender()
    self.chain_id_manager = MockChainIdManager()
    self._chain_head_lock = RLock()
    self.permission_verifier = MockPermissionVerifier()
    self.state_view_factory = MockStateViewFactory(
        self.block_tree_manager.state_db)
    # batch_execution_result=None lets the mock executor defer to each
    # batch's own expected result
    self.transaction_executor = MockTransactionExecutor(
        batch_execution_result=None)
    self.executor = SynchronousExecutor()

    self.block_validator = BlockValidator(
        state_view_factory=self.state_view_factory,
        block_cache=self.block_tree_manager.block_cache,
        transaction_executor=self.transaction_executor,
        squash_handler=None,
        identity_signer=self.block_tree_manager.identity_signer,
        data_dir=None,
        config_dir=None,
        permission_verifier=self.permission_verifier,
        thread_pool=self.executor)

    def chain_updated(head, committed_batches=None,
                      uncommitted_batches=None):
        # No-op chain-head observer; tests assert on the controller directly.
        pass

    self.chain_ctrl = ChainController(
        block_cache=self.block_tree_manager.block_cache,
        block_validator=self.block_validator,
        state_view_factory=self.state_view_factory,
        chain_head_lock=self._chain_head_lock,
        on_chain_updated=chain_updated,
        chain_id_manager=self.chain_id_manager,
        data_dir=None,
        config_dir=None,
        chain_observers=[])

    init_root = self.chain_ctrl.chain_head
    self.assert_is_chain_head(init_root)

    # create a chain of length 5 extending the root
    _, head = self.generate_chain(init_root, 5)
    self.receive_and_process_blocks(head)
    self.assert_is_chain_head(head)

    self.init_head = head
# NOTE(review): method name has a typo ("uknown" -> "unknown"); left as-is
# to avoid changing the public test name.
def test_register_with_uknown_block_ids(self):
    """Tests that the handler will respond with an UNKNOWN_BLOCK
    when a subscriber does not supply a known block id in
    last_known_block_ids
    """
    block_tree_manager = BlockTreeManager()
    delta_processor = StateDeltaProcessor(
        service=Mock(),
        state_delta_store=Mock(),
        block_store=block_tree_manager.block_store)

    handler = StateDeltaSubscriberValidationHandler(delta_processor)

    # 'a' is not an id of any block in the store
    request = StateDeltaSubscribeRequest(
        last_known_block_ids=['a'],
        address_prefixes=['000000']).SerializeToString()

    response = handler.handle('test_conn_id', request)

    self.assertEqual(HandlerStatus.RETURN, response.status)
    self.assertEqual(
        StateDeltaSubscribeResponse.UNKNOWN_BLOCK,
        response.message_out.status)
def test_publish_deltas(self):
    """Tests that a subscriber filtering on an address prefix receives
    only the changes in an event that match.
    """
    mock_service = Mock()
    block_tree_manager = BlockTreeManager()
    database = DictDatabase()
    delta_store = StateDeltaStore(database)

    delta_processor = StateDeltaProcessor(
        service=mock_service,
        state_delta_store=delta_store,
        block_store=block_tree_manager.block_store)

    # subscriber filters on the 'deadbeef' address prefix
    delta_processor.add_subscriber(
        'test_conn_id',
        [block_tree_manager.chain_head.identifier],
        ['deadbeef'])

    next_block = block_tree_manager.generate_block()

    # State added during context squash for our block
    delta_store.save_state_deltas(next_block.header.state_root_hash, [
        StateChange(address='deadbeef01',
                    value='my_state_Value'.encode(),
                    type=StateChange.SET),
        StateChange(address='a14ea01',
                    value='some other state value'.encode(),
                    type=StateChange.SET)
    ])

    # call to publish deltas for that block to the subscribers
    delta_processor.publish_deltas(next_block)

    # only the 'deadbeef'-prefixed change is delivered
    mock_service.send.assert_called_with(
        validator_pb2.Message.STATE_DELTA_EVENT,
        StateDeltaEvent(
            block_id=next_block.identifier,
            block_num=next_block.header.block_num,
            state_root_hash=next_block.header.state_root_hash,
            previous_block_id=next_block.header.previous_block_id,
            state_changes=[
                StateChange(address='deadbeef01',
                            value='my_state_Value'.encode(),
                            type=StateChange.SET)
            ]).SerializeToString(),
        connection_id='test_conn_id')
def setUp(self):
    """Wire a ChainController (send_message variant) over mocked
    network/executor collaborators.
    """
    self.blocks = BlockTreeManager()
    self.gossip = MockNetwork()
    self.executor = SynchronousExecutor()
    self.txn_executor = MockTransactionExecutor()

    def chain_updated(head):
        # No-op chain-head observer.
        pass

    self.chain_ctrl = ChainController(
        consensus=TestModeVerifier(),
        block_store=self.blocks.block_store,
        send_message=self.gossip.send_message,
        executor=self.executor,
        transaction_executor=MockTransactionExecutor(),
        on_chain_updated=chain_updated,
        squash_handler=None)
def test_publish_deltas_subscriber_matches_no_addresses(self):
    """Given a subscriber whose prefix filters don't match any addresses
    in the current state delta, it should still receive an event with the
    block change information.
    """
    mock_service = Mock()
    block_tree_manager = BlockTreeManager()
    database = DictDatabase()
    delta_store = StateDeltaStore(database)

    delta_processor = StateDeltaProcessor(
        service=mock_service,
        state_delta_store=delta_store,
        block_store=block_tree_manager.block_store)

    # '000000' matches neither of the saved change addresses below
    delta_processor.add_subscriber(
        'settings_conn_id',
        [block_tree_manager.chain_head.identifier],
        ['000000'])

    next_block = block_tree_manager.generate_block()

    # State added during context squash for our block
    delta_store.save_state_deltas(
        next_block.header.state_root_hash,
        [StateChange(address='deadbeef01',
                     value='my_state_Value'.encode(),
                     type=StateChange.SET),
         StateChange(address='a14ea01',
                     value='some other state value'.encode(),
                     type=StateChange.SET)])

    # call to publish deltas for that block to the subscribers
    delta_processor.publish_deltas(next_block)

    # the event is still sent, with an empty state_changes list
    mock_service.send.assert_called_with(
        validator_pb2.Message.STATE_DELTA_EVENT,
        StateDeltaEvent(
            block_id=next_block.identifier,
            block_num=next_block.header.block_num,
            state_root_hash=next_block.header.state_root_hash,
            state_changes=[]
        ).SerializeToString(),
        connection_id='settings_conn_id')
def setUp(self):
    """Wire a ChainController (block_cache / TestModeVerifier variant)
    over mocked network/executor collaborators.
    """
    self.blocks = BlockTreeManager()
    self.gossip = MockNetwork()
    self.executor = SynchronousExecutor()
    self.txn_executor = MockTransactionExecutor()
    self.block_sender = MockBlockSender()

    def chain_updated(head, committed_batches=None,
                      uncommitted_batches=None):
        # No-op chain-head observer.
        pass

    self.chain_ctrl = ChainController(
        consensus=TestModeVerifier(),
        block_cache=self.blocks.block_cache,
        block_sender=self.block_sender,
        executor=self.executor,
        transaction_executor=MockTransactionExecutor(),
        on_chain_updated=chain_updated,
        squash_handler=None)
def test_is_valid_subscription_known_old_chain(self):
    """Test that a check for a valid subscription where the known block id
    is a block in the middle of the chain should return True.
    """
    mock_service = Mock()
    block_tree_manager = BlockTreeManager()
    chain = block_tree_manager.generate_chain(
        block_tree_manager.chain_head, 5)

    # Grab an id from the middle of the chain
    known_id = chain[3].identifier
    block_tree_manager.block_store.update_chain(chain)

    delta_store = StateDeltaStore(DictDatabase())
    delta_processor = StateDeltaProcessor(
        service=mock_service,
        state_delta_store=delta_store,
        block_store=block_tree_manager.block_store)

    self.assertTrue(delta_processor.is_valid_subscription([known_id]))
def setUp(self):
    """Wire a ChainController (consensus_module variant) over mocked
    network/executor collaborators.
    """
    self.blocks = BlockTreeManager()
    self.gossip = MockNetwork()
    self.executor = SynchronousExecutor()
    self.txn_executor = MockTransactionExecutor()
    self.block_sender = MockBlockSender()
    self.state_view_factory = MockStateViewFactory()

    def chain_updated(head, committed_batches=None,
                      uncommitted_batches=None):
        # No-op chain-head observer.
        pass

    self.chain_ctrl = ChainController(
        consensus_module=mock_consensus,
        block_cache=self.blocks.block_cache,
        state_view_factory=self.state_view_factory,
        block_sender=self.block_sender,
        executor=self.executor,
        transaction_executor=MockTransactionExecutor(),
        on_chain_updated=chain_updated,
        squash_handler=None)
def test_add_subscriber(self):
    """Test adding a subscriber, who has no known blocks.

    This scenario is valid for subscribers who have never connected and
    would need to receive all deltas since the genesis block.

    On registration, the subscriber should receive one event, comprised
    of the state changes for the genesis block.
    """
    mock_service = Mock()
    block_tree_manager = BlockTreeManager()
    delta_store = StateDeltaStore(DictDatabase())

    delta_processor = StateDeltaProcessor(
        service=mock_service,
        state_delta_store=delta_store,
        block_store=block_tree_manager.block_store)

    # seed the store with the genesis block's deltas
    delta_store.save_state_deltas(
        block_tree_manager.chain_head.state_root_hash,
        [StateChange(address='deadbeef0000000',
                     value='my_genesis_value'.encode(),
                     type=StateChange.SET),
         StateChange(address='a14ea01',
                     value='some other state value'.encode(),
                     type=StateChange.SET)])

    delta_processor.add_subscriber(
        'test_conn_id',
        [],
        ['deadbeef'])

    self.assertEqual(['test_conn_id'], delta_processor.subscriber_ids)

    # test that it catches up, and receives the events from the chain head.
    # In this case, it should just be once, as the chain head is the
    # genesis block
    chain_head = block_tree_manager.chain_head
    mock_service.send.assert_called_with(
        validator_pb2.Message.STATE_DELTA_EVENT,
        StateDeltaEvent(
            block_id=chain_head.identifier,
            block_num=chain_head.block_num,
            state_root_hash=chain_head.state_root_hash,
            previous_block_id=chain_head.previous_block_id,
            state_changes=[StateChange(address='deadbeef0000000',
                                       value='my_genesis_value'.encode(),
                                       type=StateChange.SET)]
        ).SerializeToString(),
        connection_id='test_conn_id')
def test_add_subscriber_unknown_block_id(self):
    """Test adding a subscriber, whose known block id is not in the
    current chain.
    """
    block_tree_manager = BlockTreeManager()
    delta_processor = StateDeltaProcessor(
        service=Mock(),
        state_delta_store=Mock(),
        block_store=block_tree_manager.block_store)

    # an unknown block id must be rejected at registration time
    with self.assertRaises(NoKnownBlockError):
        delta_processor.add_subscriber('test_conn_id',
                                       ['deadbeefb10c4'],
                                       ['deadbeef'])
def test_publish_block(self): """ Test that the Journal will produce blocks and consume those blocks to extend the chain. :return: """ # construction and wire the journal to the # gossip layer. btm = BlockTreeManager() journal = None try: journal = Journal( block_store=btm.block_store, block_cache=btm.block_cache, state_view_factory=StateViewFactory(DictDatabase()), block_sender=self.block_sender, batch_sender=self.batch_sender, transaction_executor=self.txn_executor, squash_handler=None, identity_signing_key=btm.identity_signing_key, chain_id_manager=None, state_delta_processor=self.state_delta_processor, data_dir=None, config_dir=None ) self.gossip.on_batch_received = journal.on_batch_received self.gossip.on_block_received = journal.on_block_received journal.start() # feed it a batch batch = Batch() journal.on_batch_received(batch) wait_until(lambda: self.block_sender.new_block is not None, 2) self.assertTrue(self.block_sender.new_block is not None) block = BlockWrapper(self.block_sender.new_block) journal.on_block_received(block) # wait for the chain_head to be updated. wait_until(lambda: btm.chain_head.identifier == block.identifier, 2) self.assertTrue(btm.chain_head.identifier == block.identifier) finally: if journal is not None: journal.stop()
def test_is_valid_subscription_no_known_blocks(self):
    """Test that a check for a valid subscription with no known block ids
    returns True.
    """
    block_tree_manager = BlockTreeManager()
    processor = StateDeltaProcessor(
        service=Mock(),
        state_delta_store=StateDeltaStore(DictDatabase()),
        block_store=block_tree_manager.block_store)

    # an empty last-known-block list is always acceptable
    self.assertTrue(processor.is_valid_subscription([]))
def test_is_valid_subscription_known_chain_head(self):
    """Test that a check for a valid subscription with the known block id
    is the chain head returns True.
    """
    block_tree_manager = BlockTreeManager()
    processor = StateDeltaProcessor(
        service=Mock(),
        state_delta_store=StateDeltaStore(DictDatabase()),
        block_store=block_tree_manager.block_store)

    chain_head_id = block_tree_manager.chain_head.identifier
    self.assertTrue(processor.is_valid_subscription([chain_head_id]))
def do_test_injector_blockinfoinject_create_batch():
    """Exercise BlockInfoInjector.create_batch() with a fixed BlockInfo.

    The injector is built from the parts created by
    DefaultBatchInjectorFactory so the factory's wiring (block store,
    state view factory, signer) is exercised as well.
    """
    block_info = BlockInfo(
        block_num=100,
        signer_public_key=BLOCKINFO_SIGNER_PUBLIC_KEY,
        header_signature=BLOCKINFO_HEADER_SIGNATURE,
        timestamp=2364657,
        previous_block_id=BLOCKINFO_PREVIOUS_BLOCKID)

    btm = BlockTreeManager()
    batch_injector_factory = DefaultBatchInjectorFactory(
        block_store=btm.block_store,
        state_view_factory=MockStateViewFactory(btm.state_db),
        signer=btm.identity_signer)

    # NOTE(review): reaches into the factory's private attributes, as the
    # original did — kept for parity with existing behavior.
    block_injector = BlockInfoInjector(
        batch_injector_factory._block_store,
        batch_injector_factory._state_view_factory,
        batch_injector_factory._signer)
    block_injector.create_batch(block_info=block_info)
def test_add_subscriber(self):
    """Test adding a subscriber, who has no known blocks.

    This scenario is valid for subscribers who have never connected and
    would need to receive all deltas since the genesis block.
    """
    mock_service = Mock()
    block_tree_manager = BlockTreeManager()
    delta_processor = StateDeltaProcessor(
        service=mock_service,
        state_delta_store=Mock(),
        block_store=block_tree_manager.block_store)

    delta_processor.add_subscriber('test_conn_id', [], ['deadbeef'])

    self.assertEqual(['test_conn_id'], delta_processor.subscriber_ids)
def setUp(self):
    """Wire a ChainController (TestModeVerifier, single-arg chain_updated
    variant) over mocked network/executor collaborators.
    """
    self.blocks = BlockTreeManager()
    self.gossip = MockNetwork()
    self.executor = SynchronousExecutor()
    self.txn_executor = MockTransactionExecutor()
    self.block_sender = MockBlockSender()

    def chain_updated(head):
        # No-op chain-head observer.
        pass

    self.chain_ctrl = ChainController(
        consensus=TestModeVerifier(),
        block_cache=self.blocks.block_cache,
        block_sender=self.block_sender,
        executor=self.executor,
        transaction_executor=MockTransactionExecutor(),
        on_chain_updated=chain_updated,
        squash_handler=None)
def test_get_events_ignore_bad_blocks(self):
    """Tests that the GetStateDeltaEventsHandler will return a response
    containing only the events for blocks that exists.
    """
    block_tree_manager = BlockTreeManager()
    delta_store = StateDeltaStore(DictDatabase())

    # seed the store with the genesis block's deltas
    delta_store.save_state_deltas(
        block_tree_manager.chain_head.state_root_hash,
        [
            StateChange(address='deadbeef0000000',
                        value='my_genesis_value'.encode(),
                        type=StateChange.SET),
            StateChange(address='a14ea01',
                        value='some other state value'.encode(),
                        type=StateChange.SET)
        ])

    handler = GetStateDeltaEventsHandler(block_tree_manager.block_store,
                                         delta_store)

    # request one real block and one that does not exist
    request = GetStateDeltaEventsRequest(
        block_ids=[
            block_tree_manager.chain_head.identifier,
            'somebadblockid'
        ],
        address_prefixes=['deadbeef']).SerializeToString()

    response = handler.handle('test_conn_id', request)

    self.assertEqual(HandlerStatus.RETURN, response.status)
    self.assertEqual(GetStateDeltaEventsResponse.OK,
                     response.message_out.status)

    # only the existing block produces an event, filtered to 'deadbeef'
    chain_head = block_tree_manager.chain_head
    self.assertEqual([
        StateDeltaEvent(block_id=chain_head.identifier,
                        block_num=chain_head.block_num,
                        state_root_hash=chain_head.state_root_hash,
                        previous_block_id=chain_head.previous_block_id,
                        state_changes=[
                            StateChange(address='deadbeef0000000',
                                        value='my_genesis_value'.encode(),
                                        type=StateChange.SET)
                        ])
    ], [event for event in response.message_out.events])
def __init__(self):
    """Wire a BlockValidator plus ChainController over a block tree
    created WITHOUT a genesis block, so tests start with an empty chain
    (chain_head is None).
    """
    self.block_tree_manager = BlockTreeManager(with_genesis=False)
    self.gossip = MockNetwork()
    self.txn_executor = MockTransactionExecutor()
    self.block_sender = MockBlockSender()
    self.chain_id_manager = MockChainIdManager()
    self.chain_head_lock = RLock()
    self.permission_verifier = MockPermissionVerifier()
    self.state_view_factory = MockStateViewFactory(
        self.block_tree_manager.state_db)
    # batch_execution_result=None lets the mock executor defer to each
    # batch's own expected result
    self.transaction_executor = MockTransactionExecutor(
        batch_execution_result=None)
    self.executor = SynchronousExecutor()

    self.block_validator = BlockValidator(
        state_view_factory=self.state_view_factory,
        block_cache=self.block_tree_manager.block_cache,
        transaction_executor=self.transaction_executor,
        squash_handler=None,
        identity_signer=self.block_tree_manager.identity_signer,
        data_dir=None,
        config_dir=None,
        permission_verifier=self.permission_verifier,
        thread_pool=self.executor)

    def chain_updated(head, committed_batches=None,
                      uncommitted_batches=None):
        # No-op chain-head observer; tests assert on the controller directly.
        pass

    self.chain_ctrl = ChainController(
        block_cache=self.block_tree_manager.block_cache,
        block_validator=self.block_validator,
        state_view_factory=self.state_view_factory,
        chain_head_lock=self.chain_head_lock,
        on_chain_updated=chain_updated,
        chain_id_manager=self.chain_id_manager,
        data_dir=None,
        config_dir=None,
        chain_observers=[])

    # with no genesis, the controller must start without a chain head
    self.assertIsNone(self.chain_ctrl.chain_head)
def test_add_subscriber_known_block_id(self):
    """Test adding a subscriber, whose known block id is the current
    chainhead.
    """
    mock_service = Mock()
    block_tree_manager = BlockTreeManager()
    delta_processor = StateDeltaProcessor(
        service=mock_service,
        state_delta_store=Mock(),
        block_store=block_tree_manager.block_store)

    delta_processor.add_subscriber(
        'test_conn_id',
        [block_tree_manager.chain_head.identifier],
        ['deadbeef'])

    self.assertEqual(['test_conn_id'], delta_processor.subscriber_ids)
    # already at the chain head, so no catch-up event is sent
    self.assertTrue(not mock_service.send.called)
def test_add_subscriber(self):
    """Tests that the handler for adding the subscriptions will properly
    add a subscriber.
    """
    block_tree_manager = BlockTreeManager()
    delta_processor = StateDeltaProcessor(
        service=Mock(),
        state_delta_store=Mock(),
        block_store=block_tree_manager.block_store)

    handler = StateDeltaAddSubscriberHandler(delta_processor)

    request = RegisterStateDeltaSubscriberRequest(
        last_known_block_ids=[block_tree_manager.chain_head.identifier],
        address_prefixes=['0123456']).SerializeToString()

    response = handler.handle('test_conn_id', request)

    # PASS: the handler defers the ack to a later handler in the chain
    self.assertEqual(HandlerStatus.PASS, response.status)
    self.assertEqual(['test_conn_id'], delta_processor.subscriber_ids)
def test_publish_block(self): """ Test that the Journal will produce blocks and consume those blocks to extend the chain. :return: """ # construction and wire the journal to the # gossip layer. btm = BlockTreeManager() journal = None try: journal = Journal(consensus=test_mode_consensus, block_store=btm.block_store.store, block_cache=btm.block_cache, block_sender=self.block_sender, transaction_executor=self.txn_executor, squash_handler=None) self.gossip.on_batch_received = journal.on_batch_received self.gossip.on_block_received = journal.on_block_received journal.start() # feed it a batch batch = Batch() journal.on_batch_received(batch) wait_until(lambda: self.block_sender.new_block is not None, 2) self.assertTrue(self.block_sender.new_block is not None) block = BlockWrapper(self.block_sender.new_block) journal.on_block_received(block) # wait for the chain_head to be updated. wait_until(lambda: btm.chain_head.identifier == block.identifier, 2) self.assertTrue(btm.chain_head.identifier == block.identifier) finally: if journal is not None: journal.stop()
def setUp(self):
    """Wire a ChainController (state-delta-processor variant) over mocked
    collaborators, then extend the genesis root by five blocks so every
    test starts from a non-trivial chain head (``self.init_head``).
    """
    self.block_tree_manager = BlockTreeManager()
    self.gossip = MockNetwork()
    self.executor = SynchronousExecutor()
    self.txn_executor = MockTransactionExecutor()
    self.block_sender = MockBlockSender()
    self.chain_id_manager = MockChainIdManager()
    self._chain_head_lock = RLock()
    self.state_delta_processor = MockStateDeltaProcessor()

    def chain_updated(head, committed_batches=None,
                      uncommitted_batches=None):
        # No-op chain-head observer; tests assert on the controller directly.
        pass

    self.chain_ctrl = ChainController(
        block_cache=self.block_tree_manager.block_cache,
        state_view_factory=MockStateViewFactory(
            self.block_tree_manager.state_db),
        block_sender=self.block_sender,
        executor=self.executor,
        transaction_executor=MockTransactionExecutor(
            batch_execution_result=None),
        chain_head_lock=self._chain_head_lock,
        on_chain_updated=chain_updated,
        squash_handler=None,
        chain_id_manager=self.chain_id_manager,
        state_delta_processor=self.state_delta_processor,
        identity_signing_key=self.block_tree_manager.identity_signing_key,
        data_dir=None,
        config_dir=None)

    init_root = self.chain_ctrl.chain_head
    self.assert_is_chain_head(init_root)

    # create a chain of length 5 extending the root
    _, head = self.generate_chain(init_root, 5)
    self.receive_and_process_blocks(head)
    self.assert_is_chain_head(head)

    self.init_head = head
def setUp(self, mock_batch_injector_factory):
    """Build a BlockPublisher (block_manager / committed-predicate
    variant) with a patched batch-injector factory (injected by the test
    decorator) that creates no injectors, plus default batches for tests.
    """
    # the patched factory yields no injectors, so publishing is exercised
    # without injected batches
    mock_batch_injector_factory.create_injectors.return_value = []

    self.block_tree_manager = BlockTreeManager()
    self.block_sender = MockBlockSender()
    self.batch_sender = MockBatchSender()
    self.state_view_factory = MockStateViewFactory({})
    self.permission_verifier = MockPermissionVerifier()

    self.publisher = BlockPublisher(
        block_manager=self.block_tree_manager.block_manager,
        transaction_executor=MockTransactionExecutor(),
        transaction_committed=(
            self.block_tree_manager.block_store.has_transaction
        ),
        batch_committed=self.block_tree_manager.block_store.has_batch,
        state_view_factory=self.state_view_factory,
        block_sender=self.block_sender,
        batch_sender=self.batch_sender,
        chain_head=self.block_tree_manager.chain_head.block,
        identity_signer=self.block_tree_manager.identity_signer,
        data_dir=None,
        config_dir=None,
        batch_observers=[],
        permission_verifier=self.permission_verifier,
        batch_injector_factory=mock_batch_injector_factory)

    self.init_chain_head = self.block_tree_manager.chain_head
    self.result_block = None

    # A list of batches is created at the beginning of each test.
    # The test assertions and the publisher function wrappers
    # take these batches as a default argument.
    self.batch_count = 8
    self.batches = self.make_batches()
class TestChainControllerGenesisPeer(unittest.TestCase):
    """ChainController behavior for a validator that starts with no chain."""

    def setUp(self):
        self.block_tree_manager = BlockTreeManager(with_genesis=False)
        self.gossip = MockNetwork()
        self.executor = SynchronousExecutor()
        self.txn_executor = MockTransactionExecutor()
        self.block_sender = MockBlockSender()
        self.chain_id_manager = MockChainIdManager()
        self.state_delta_processor = MockStateDeltaProcessor()
        self.chain_head_lock = RLock()

        def ignore_chain_update(head, committed_batches=None,
                                uncommitted_batches=None):
            # Head-change notifications are irrelevant to these tests.
            pass

        self.chain_ctrl = ChainController(
            block_cache=self.block_tree_manager.block_cache,
            state_view_factory=MockStateViewFactory(
                self.block_tree_manager.state_db),
            block_sender=self.block_sender,
            executor=self.executor,
            transaction_executor=MockTransactionExecutor(),
            chain_head_lock=self.chain_head_lock,
            on_chain_updated=ignore_chain_update,
            squash_handler=None,
            chain_id_manager=self.chain_id_manager,
            state_delta_processor=self.state_delta_processor,
            identity_signing_key=self.block_tree_manager.identity_signing_key,
            data_dir=None,
            config_dir=None)

        # With no genesis, the controller must start headless.
        self.assertIsNone(self.chain_ctrl.chain_head)

    def test_genesis_block_mismatch(self):
        '''A genesis block whose id mismatches the block-chain-id stored
        on disk is dropped, leaving the chain headless.
        '''
        self.chain_id_manager.save_block_chain_id('my_chain_id')
        foreign_genesis = \
            self.block_tree_manager.generate_genesis_block()
        self.chain_ctrl.on_block_received(foreign_genesis)

        self.assertIsNone(self.chain_ctrl.chain_head)

    def test_genesis_block_matches_block_chain_id(self):
        '''A validator with no chain accepts a valid genesis block whose
        id matches the block-chain-id stored on disk.
        '''
        my_genesis_block = self.block_tree_manager.generate_genesis_block()
        chain_id = my_genesis_block.header_signature
        self.chain_id_manager.save_block_chain_id(chain_id)

        with patch.object(BlockValidator,
                          'validate_block',
                          return_value=True):
            self.chain_ctrl.on_block_received(my_genesis_block)

        self.assertIsNotNone(self.chain_ctrl.chain_head)
        chain_head_sig = self.chain_ctrl.chain_head.header_signature

        self.assertEqual(
            chain_head_sig[:8],
            chain_id[:8],
            'Chain id does not match')
        self.assertEqual(
            chain_id,
            self.chain_id_manager.get_block_chain_id())

    def test_invalid_genesis_block_matches_block_chain_id(self):
        '''A validator with no chain drops an invalid genesis block even
        when its id matches the block-chain-id stored on disk.
        '''
        my_genesis_block = self.block_tree_manager.generate_genesis_block()
        chain_id = my_genesis_block.header_signature
        self.chain_id_manager.save_block_chain_id(chain_id)

        with patch.object(BlockValidator,
                          'validate_block',
                          return_value=False):
            self.chain_ctrl.on_block_received(my_genesis_block)

        self.assertIsNone(self.chain_ctrl.chain_head)
class TestBlockValidator(unittest.TestCase):
    """Tests for BlockValidator: fork resolution and rejection of blocks
    with bad predecessors, bad consensus, or bad batches.
    """

    def setUp(self):
        self.state_view_factory = MockStateViewFactory()
        self.block_tree_manager = BlockTreeManager()
        self.root = self.block_tree_manager.chain_head
        self.block_validation_handler = self.BlockValidationHandler()

    # fork based tests

    def test_fork_simple(self):
        """Test a simple case of a new block extending the current root."""
        new_block = self.block_tree_manager.generate_block(
            previous_block=self.root)

        self.validate_block(new_block)

        self.assert_valid_block(new_block)
        self.assert_new_block_committed()

    def test_good_fork_lower(self):
        """Test case of a new block extending on a valid chain but not as
        long as the current chain.
        """
        # create a new valid chain 5 long from the current root
        chain, head = self.generate_chain_with_head(
            self.root, 5, {'add_to_store': True})

        self.block_tree_manager.set_chain_head(head)

        # generate candidate chain 3 long from the same root
        new_chain, new_head = self.generate_chain_with_head(
            self.root, 3, {'add_to_cache': True})

        self.validate_block(new_head)

        # valid but shorter, so it must not become the new head
        self.assert_valid_block(new_head)
        self.assert_new_block_not_committed()

    def test_good_fork_higher(self):
        """Test case of a new block extending on a valid chain but longer
        than the current chain. (similar to test_good_fork_lower but uses
        a different code path when finding the common root)
        """
        # create a new valid chain 5 long from the current root
        chain, head = self.generate_chain_with_head(
            self.root, 5, {'add_to_store': True})

        self.block_tree_manager.set_chain_head(head)

        # generate candidate chain 8 long extending the new head
        # (note: rooted at head, not self.root as in test_good_fork_lower)
        new_chain, new_head = self.generate_chain_with_head(
            head, 8, {'add_to_cache': True})

        self.validate_block(new_head)

        self.assert_valid_block(new_head)
        self.assert_new_block_committed()

    def test_fork_different_genesis(self):
        """Test the case where the new block is from a different genesis."""
        # create a new valid chain 5 long from the current root
        chain, head = self.generate_chain_with_head(
            self.root, 5, {'add_to_store': True})

        self.block_tree_manager.set_chain_head(head)

        # generate candidate chain 5 long from its own genesis
        new_chain, new_head = self.generate_chain_with_head(
            None, 5, {'add_to_cache': True})

        self.validate_block(new_head)

        self.assert_invalid_block(new_head)
        self.assert_new_block_not_committed()

    def test_fork_missing_predecessor(self):
        """Test the case where the new block is missing a predecessor."""
        # generate candidate chain 5 long off the current head.
        chain, head = self.generate_chain_with_head(
            self.root, 5, {'add_to_cache': True})

        # remove one of the new blocks
        del self.block_tree_manager.block_cache[chain[1].identifier]

        self.validate_block(head)

        self.assert_invalid_block(head)
        self.assert_new_block_not_committed()

    def test_fork_invalid_predecessor(self):
        """Test the case where the new block has an invalid predecessor."""
        # generate candidate chain 5 long off the current head.
        chain, head = self.generate_chain_with_head(
            self.root, 5, {'add_to_cache': True})

        # Mark a predecessor as invalid
        chain[1].status = BlockStatus.Invalid

        self.validate_block(head)

        self.assert_invalid_block(head)
        self.assert_new_block_not_committed()

    def test_block_bad_consensus(self):
        """Test the case where the new block has bad consensus."""
        chain, head = self.generate_chain_with_head(
            self.root, 5, {'add_to_store': True})

        new_block = self.block_tree_manager.generate_block(
            previous_block=head,
            add_to_cache=True,
            invalid_consensus=True)

        self.validate_block(new_block)

        self.assert_invalid_block(new_block)
        self.assert_new_block_not_committed()

    def test_block_bad_batch(self):
        """Test the case where the new block has a bad batch."""
        chain, head = self.generate_chain_with_head(
            self.root, 5, {'add_to_store': True})

        new_block = self.block_tree_manager.generate_block(
            previous_block=head,
            add_to_cache=True,
            invalid_batch=True)

        self.validate_block(new_block)

        self.assert_invalid_block(new_block)
        self.assert_new_block_not_committed()

    def test_block_missing_batch_dependency(self):
        """Test the case where the new block has a batch that is missing a
        dependency.
        """
        # Not implemented yet.
        pass

    # assertions

    def assert_valid_block(self, block):
        self.assertEqual(
            block.status, BlockStatus.Valid,
            "Block should be valid")

    def assert_invalid_block(self, block):
        self.assertEqual(
            block.status, BlockStatus.Invalid,
            "Block should be invalid")

    def assert_new_block_committed(self):
        self.assert_handler_has_result()
        self.assertTrue(
            self.block_validation_handler.result["commit_new_block"],
            "New block not committed, should be")

    def assert_new_block_not_committed(self):
        self.assert_handler_has_result()
        self.assertFalse(
            self.block_validation_handler.result["commit_new_block"],
            "New block committed, shouldn't be")

    def assert_handler_has_result(self):
        msg = "Validation handler doesn't have result"
        self.assertTrue(self.block_validation_handler.has_result(), msg)

    # block validation

    def validate_block(self, block):
        """Run a BlockValidator for block, reporting the outcome to the
        test's validation handler.
        """
        validator = self.create_block_validator(
            block,
            self.block_validation_handler.on_block_validated)
        validator.run()

    def create_block_validator(self, new_block, on_block_validated):
        return BlockValidator(
            consensus_module=mock_consensus,
            new_block=new_block,
            chain_head=self.block_tree_manager.chain_head,
            state_view_factory=self.state_view_factory,
            block_cache=self.block_tree_manager.block_cache,
            done_cb=on_block_validated,
            executor=MockTransactionExecutor(batch_execution_result=None),
            squash_handler=None,
            identity_signing_key=self.block_tree_manager.identity_signing_key,
            data_dir=None,
            config_dir=None)

    class BlockValidationHandler(object):
        """Captures the commit decision made by a BlockValidator run."""

        def __init__(self):
            self.result = None

        def on_block_validated(self, commit_new_block, result):
            result["commit_new_block"] = commit_new_block
            self.result = result

        def has_result(self):
            return self.result is not None

    # block tree manager interface

    def generate_chain_with_head(self, root_block, num_blocks, params=None):
        """Generate num_blocks extending root_block; return (chain, head)."""
        chain = self.block_tree_manager.generate_chain(
            root_block, num_blocks, params)

        head = chain[-1]

        return chain, head
class BlockStoreTest(unittest.TestCase):
    """Tests for BlockStore get/set, commit checks, and lookup-by-id APIs."""

    def setUp(self):
        self.block_tree_manager = BlockTreeManager()

    def test_chain_head(self):
        """ Test that the chain head can be retrieved from the
        BlockStore.
        """
        block = self.create_block()
        block_store = self.create_block_store(
            {
                block.header_signature: block
            })
        chain_head = block_store.chain_head
        self.assert_blocks_equal(chain_head, block)

    def test_get(self):
        """ Test BlockStore block get operations.
        """
        block = self.create_block()
        block_store = self.create_block_store(
            {
                block.header_signature: block
            })
        chain_head = block_store[block.header_signature]
        self.assert_blocks_equal(chain_head, block)

        with self.assertRaises(KeyError):
            block_store['txn']

        with self.assertRaises(KeyError):
            chain_head = block_store['missing']

    def test_set(self):
        """ Test BlockStore block set operations.
        """
        block = self.create_block()
        block_store = self.create_block_store(
            {
                block.header_signature: block,
            })
        block2 = self.create_block()

        with self.assertRaises(KeyError):
            block_store['head'] = block2

        block_store[block2.identifier] = block2
        stored_block = block_store[block2.identifier]
        self.assert_blocks_equal(stored_block, block2)

        with self.assertRaises(AttributeError):
            block_store['batch'] = 'head'

    def test_has(self):
        """ Test BlockStore tests if Transactions and Batches are commited
        to the current chain.
        """
        block = self.create_block()
        block_store = self.create_block_store(
            {
                block.header_signature: block
            })

        self.assertTrue(block_store.has_transaction(
            _get_first_txn_id(block)))
        self.assertFalse(block_store.has_transaction('txn_missing'))
        self.assertTrue(block_store.has_batch(
            _get_first_batch_id(block)))
        # Fixed: the missing-batch case must use has_batch; the original
        # mistakenly called has_transaction('batch_missing') here.
        self.assertFalse(block_store.has_batch('batch_missing'))

        self.assertTrue(block.header_signature in block_store)
        self.assertFalse('block_missing' in block_store)

        self.assertFalse('batch_missing' in block_store)
        self.assertFalse('txn_missing' in block_store)

    def test_get_block_by_batch_id(self):
        """ Test BlockStore retrieval of a Block that contains a specific
        batch.
        """
        block = self.create_block()
        block_store = self.create_block_store()
        block_store.update_chain([block])

        batch_id = block.batches[0].header_signature
        stored = block_store.get_block_by_batch_id(batch_id)
        self.assert_blocks_equal(stored, block)

        with self.assertRaises(ValueError):
            block_store.get_block_by_batch_id("bad")

    def test_get_batch_by_transaction(self):
        """ Test BlockStore retrieval of a Batch that contains a specific
        transaction.
        """
        block = self.create_block()
        block_store = self.create_block_store()
        block_store.update_chain([block])

        batch = block.batches[0]
        txn_id = batch.transactions[0].header_signature
        stored = block_store.get_batch_by_transaction(txn_id)
        self.asset_protobufs_equal(stored, batch)

        with self.assertRaises(ValueError):
            block_store.get_batch_by_transaction("bad")

    def test_get_block_by_transaction_id(self):
        """ Test BlockStore retrieval of a Block that contains a specific
        transaction.
        """
        block = self.create_block()
        block_store = self.create_block_store()
        block_store.update_chain([block])

        txn_id = block.batches[0].transactions[0].header_signature
        stored = block_store.get_block_by_transaction_id(txn_id)
        self.assert_blocks_equal(stored, block)

        with self.assertRaises(ValueError):
            stored = block_store.get_block_by_transaction_id("bad")

    def test_get_batch(self):
        """ Test BlockStore retrieval of a batch by id.
        """
        block = self.create_block()
        block_store = self.create_block_store()
        block_store.update_chain([block])

        batch = block.batches[0]
        batch_id = batch.header_signature
        stored = block_store.get_batch(batch_id)
        self.asset_protobufs_equal(stored, batch)

        with self.assertRaises(ValueError):
            stored = block_store.get_batch("bad")

    def test_get_transaction(self):
        """ Test BlockStore retrieval of a transaction by id.
        """
        block = self.create_block()
        block_store = self.create_block_store()
        block_store.update_chain([block])

        txn = block.batches[0].transactions[0]
        txn_id = txn.header_signature
        stored = block_store.get_transaction(txn_id)
        self.asset_protobufs_equal(stored, txn)

        with self.assertRaises(ValueError):
            stored = block_store.get_transaction("bad")

    def assert_blocks_equal(self, stored, reference):
        self.asset_protobufs_equal(stored.block, reference.block)

    # NOTE: "asset" is a long-standing typo for "assert"; the name is kept
    # to avoid churning every call site in one change.
    def asset_protobufs_equal(self, stored, reference):
        self.assertEqual(self.encode(stored), self.encode(reference))

    @staticmethod
    def create_block_store(data=None):
        """Create a BlockStore over an in-memory database, optionally
        pre-populated with data ({header_signature: block}).
        """
        return BlockStore(DictDatabase(
            data, indexes=BlockStore.create_index_configuration()))

    def create_block(self):
        return self.block_tree_manager.create_block()

    @staticmethod
    def encode(obj):
        return obj.SerializeToString()
def setUp(self):
    """Provide each test with a fresh block tree."""
    self.block_tree_manager = BlockTreeManager()
class TestChainController(unittest.TestCase):
    """Tests of ChainController fork resolution: weights, lengths, bad
    and missing blocks, and forks that advance mid-validation.
    """

    def setUp(self):
        self.block_tree_manager = BlockTreeManager()
        self.gossip = MockNetwork()
        self.executor = SynchronousExecutor()
        self.txn_executor = MockTransactionExecutor()
        self.block_sender = MockBlockSender()
        self.chain_id_manager = MockChainIdManager()
        self._chain_head_lock = RLock()
        self.state_delta_processor = MockStateDeltaProcessor()

        def chain_updated(head, committed_batches=None,
                          uncommitted_batches=None):
            # Head-change notifications are not under test here.
            pass

        self.chain_ctrl = ChainController(
            block_cache=self.block_tree_manager.block_cache,
            state_view_factory=MockStateViewFactory(
                self.block_tree_manager.state_db),
            block_sender=self.block_sender,
            executor=self.executor,
            transaction_executor=MockTransactionExecutor(
                batch_execution_result=None),
            chain_head_lock=self._chain_head_lock,
            on_chain_updated=chain_updated,
            squash_handler=None,
            chain_id_manager=self.chain_id_manager,
            state_delta_processor=self.state_delta_processor,
            identity_signing_key=self.block_tree_manager.identity_signing_key,
            data_dir=None,
            config_dir=None)

        init_root = self.chain_ctrl.chain_head
        self.assert_is_chain_head(init_root)

        # create a chain of length 5 extending the root
        _, head = self.generate_chain(init_root, 5)
        self.receive_and_process_blocks(head)
        self.assert_is_chain_head(head)

        self.init_head = head

    def test_simple_case(self):
        new_block = self.generate_block(self.init_head)
        self.receive_and_process_blocks(new_block)
        self.assert_is_chain_head(new_block)
        # validate that the deltas for the new block are published
        self.assertEqual(new_block, self.state_delta_processor.block)

    def test_alternate_genesis(self):
        '''Tests a fork extending an alternate genesis block
        '''
        chain, head = self.generate_chain(None, 5)

        for block in chain:
            self.receive_and_process_blocks(block)

        # make sure initial head is still chain head
        self.assert_is_chain_head(self.init_head)

    def test_bad_blocks(self):
        '''Tests bad blocks extending current chain
        '''
        # Bad due to consensus
        bad_consen = self.generate_block(
            previous_block=self.init_head,
            invalid_consensus=True)

        # chain head should be the same
        self.receive_and_process_blocks(bad_consen)
        self.assert_is_chain_head(self.init_head)

        # Bad due to transaction
        bad_batch = self.generate_block(
            previous_block=self.init_head,
            invalid_batch=True)

        # chain head should be the same
        self.receive_and_process_blocks(bad_batch)
        self.assert_is_chain_head(self.init_head)

        # Ensure good block works
        good_block = self.generate_block(
            previous_block=self.init_head)

        # chain head should be good_block
        self.receive_and_process_blocks(good_block)
        self.assert_is_chain_head(good_block)

    def test_fork_weights(self):
        '''Tests extending blocks of different weights
        '''
        weight_4 = self.generate_block(
            previous_block=self.init_head,
            weight=4)

        weight_7 = self.generate_block(
            previous_block=self.init_head,
            weight=7)

        weight_8 = self.generate_block(
            previous_block=self.init_head,
            weight=8)

        self.receive_and_process_blocks(
            weight_7,
            weight_4,
            weight_8)

        self.assert_is_chain_head(weight_8)

    def test_fork_lengths(self):
        '''Tests competing forks of different lengths
        '''
        _, head_2 = self.generate_chain(self.init_head, 2)
        _, head_7 = self.generate_chain(self.init_head, 7)
        _, head_5 = self.generate_chain(self.init_head, 5)

        self.receive_and_process_blocks(
            head_2,
            head_7,
            head_5)

        self.assert_is_chain_head(head_7)

    def test_advancing_chain(self):
        '''Tests the chain being advanced between a fork's creation
        and validation
        '''
        _, fork_5 = self.generate_chain(self.init_head, 5)
        _, fork_3 = self.generate_chain(self.init_head, 3)

        self.receive_and_process_blocks(fork_3)
        self.assert_is_chain_head(fork_3)

        # fork_5 is longer than fork_3, so it should be accepted
        self.receive_and_process_blocks(fork_5)
        self.assert_is_chain_head(fork_5)

    def test_fork_missing_block(self):
        '''Tests a fork with a missing block
        '''
        # make new chain
        new_chain, new_head = self.generate_chain(self.init_head, 5)

        self.chain_ctrl.on_block_received(new_head)

        # delete a block from the new chain
        del self.chain_ctrl._block_cache[new_chain[3].identifier]

        self.executor.process_all()

        # chain shouldn't advance
        self.assert_is_chain_head(self.init_head)

        # try again, chain still shouldn't advance
        self.receive_and_process_blocks(new_head)

        self.assert_is_chain_head(self.init_head)

    def test_fork_bad_block(self):
        '''Tests a fork with a bad block in the middle
        '''
        # make two chains extending chain
        good_chain, good_head = self.generate_chain(self.init_head, 5)
        bad_chain, bad_head = self.generate_chain(self.init_head, 5)

        self.chain_ctrl.on_block_received(bad_head)
        self.chain_ctrl.on_block_received(good_head)

        # invalidate block in the middle of bad_chain
        bad_chain[3].status = BlockStatus.Invalid

        self.executor.process_all()

        # good_chain should be accepted
        self.assert_is_chain_head(good_head)

    def test_advancing_fork(self):
        '''Tests a fork advancing before getting validated
        '''
        _, fork_head = self.generate_chain(self.init_head, 5)

        self.chain_ctrl.on_block_received(fork_head)

        # advance fork before it gets accepted
        _, ext_head = self.generate_chain(fork_head, 3)

        self.executor.process_all()

        self.assert_is_chain_head(fork_head)

        self.receive_and_process_blocks(ext_head)

        self.assert_is_chain_head(ext_head)

    def test_block_extends_in_validation(self):
        '''Tests a block getting extended while being validated
        '''
        # create candidate block
        candidate = self.block_tree_manager.generate_block(
            previous_block=self.init_head)

        self.assert_is_chain_head(self.init_head)

        # queue up the candidate block, but don't process
        self.chain_ctrl.on_block_received(candidate)

        # create a new block extending the candidate block
        extending_block = self.block_tree_manager.generate_block(
            previous_block=candidate)

        self.assert_is_chain_head(self.init_head)

        # queue and process the extending block,
        # which should be the new head
        self.receive_and_process_blocks(extending_block)
        self.assert_is_chain_head(extending_block)

    def test_multiple_extended_forks(self):
        '''A more involved example of competing forks

        Three forks of varying lengths a_0, b_0, and c_0 are created
        extending the existing chain, with c_0 being the longest initially.
        The chains are extended in the following sequence:

        1. Extend all forks by 2. The c fork should remain the head.
        2. Extend forks by lenths such that the b fork is the longest.
           It should be the new head.
        3. Extend all forks by 8. The b fork should remain the head.
        4. Create a new fork of the initial chain longer than any of the
           other forks. It should be the new head.
        '''
        # create forks of various lengths
        _, a_0 = self.generate_chain(self.init_head, 3)
        _, b_0 = self.generate_chain(self.init_head, 5)
        _, c_0 = self.generate_chain(self.init_head, 7)

        self.receive_and_process_blocks(a_0, b_0, c_0)
        self.assert_is_chain_head(c_0)

        # extend every fork by 2
        _, a_1 = self.generate_chain(a_0, 2)
        _, b_1 = self.generate_chain(b_0, 2)
        _, c_1 = self.generate_chain(c_0, 2)

        self.receive_and_process_blocks(a_1, b_1, c_1)
        self.assert_is_chain_head(c_1)

        # extend the forks by different lengths
        _, a_2 = self.generate_chain(a_1, 1)
        _, b_2 = self.generate_chain(b_1, 6)
        _, c_2 = self.generate_chain(c_1, 3)

        self.receive_and_process_blocks(a_2, b_2, c_2)
        self.assert_is_chain_head(b_2)

        # extend every fork by 8 (fixed comment: the original said "by 2")
        _, a_3 = self.generate_chain(a_2, 8)
        _, b_3 = self.generate_chain(b_2, 8)
        _, c_3 = self.generate_chain(c_2, 8)

        self.receive_and_process_blocks(a_3, b_3, c_3)
        self.assert_is_chain_head(b_3)

        # create a new longest chain
        _, wow = self.generate_chain(self.init_head, 30)
        self.receive_and_process_blocks(wow)
        self.assert_is_chain_head(wow)

    # TODO: next multi threaded
    # TODO: next add block publisher
    # TODO: next batch lists
    # TODO: integrate with LMDB
    # TODO: early vs late binding (class member of consensus BlockPublisher)

    # helpers

    def assert_is_chain_head(self, block):
        chain_head_sig = self.chain_ctrl.chain_head.header_signature
        block_sig = block.header_signature

        self.assertEqual(
            chain_head_sig[:8],
            block_sig[:8],
            'Not chain head')

    def generate_chain(self, root_block, num_blocks, params=None):
        '''Returns (chain, chain_head). Usually only the head is needed,
        but occasionally the chain itself is used.
        '''
        # Avoid a mutable default argument (the original used
        # params={'add_to_cache': True} directly in the signature).
        if params is None:
            params = {'add_to_cache': True}

        chain = self.block_tree_manager.generate_chain(
            root_block, num_blocks, params)

        head = chain[-1]

        return chain, head

    def generate_block(self, *args, **kwargs):
        return self.block_tree_manager.generate_block(
            *args, **kwargs)

    def receive_and_process_blocks(self, *blocks):
        for block in blocks:
            self.chain_ctrl.on_block_received(block)
        self.executor.process_all()
class TestBlockPublisher(unittest.TestCase):
    '''
    The block publisher has three main functions, and in these tests
    those functions are given the following wrappers for convenience:
        * on_batch_received -> receive_batches
        * on_chain_updated -> update_chain_head
        * on_check_publish_block -> publish_block

    After publishing a block, publish_block sends its block to the mock
    block sender, and that block is named result_block. This block is
    what is checked by the test assertions.

    The basic pattern for the publisher tests (with variations) is:
        0) make a list of batches (usually in setUp);
        1) receive the batches;
        2) publish a block;
        3) verify the block (checking that it contains the correct
           batches, or checking that it doesn't exist, or whatever).

    The publisher chain head might be updated several times in a test.
    '''

    def setUp(self):
        self.block_tree_manager = BlockTreeManager()
        self.block_sender = MockBlockSender()
        self.batch_sender = MockBatchSender()
        self.state_view_factory = MockStateViewFactory({})

        self.publisher = BlockPublisher(
            transaction_executor=MockTransactionExecutor(),
            block_cache=self.block_tree_manager.block_cache,
            state_view_factory=self.state_view_factory,
            block_sender=self.block_sender,
            batch_sender=self.batch_sender,
            squash_handler=None,
            chain_head=self.block_tree_manager.chain_head,
            identity_signing_key=self.block_tree_manager.identity_signing_key,
            data_dir=None,
            config_dir=None)

        self.init_chain_head = self.block_tree_manager.chain_head

        self.result_block = None

        # A list of batches is created at the beginning of each test.
        # The test assertions and the publisher function wrappers
        # take these batches as a default argument.
        self.batch_count = 8
        self.batches = self.make_batches()

    def test_publish(self):
        '''
        Publish a block with several batches
        '''
        self.receive_batches()

        self.publish_block()

        self.verify_block()

    def test_reject_duplicate_batches_from_receive(self):
        '''
        Test that duplicate batches from on_batch_received are rejected
        '''
        for _ in range(5):
            self.receive_batches()

        self.publish_block()

        self.verify_block()

    def test_reject_duplicate_batches_from_store(self):
        '''
        Test that duplicate batches from block store are rejected
        '''
        self.update_chain_head(None)

        self.update_chain_head(
            head=self.init_chain_head,
            uncommitted=self.batches)

        self.receive_batches()

        self.publish_block()

        self.verify_block()

    def test_no_chain_head(self):
        '''
        Test that nothing gets published with a null chain head,
        then test that publishing resumes after updating
        '''
        self.update_chain_head(None)

        self.receive_batches()

        # try to publish block (failing)
        self.publish_block()

        self.assert_no_block_published()

        # reset chain head several times,
        # making sure batches remain queued
        for _ in range(3):
            self.update_chain_head(None)
            self.update_chain_head(self.init_chain_head)

        # try to publish block (succeeding)
        self.publish_block()

        self.verify_block()

    def test_committed_batches(self):
        '''
        Test that batches committed upon updating the chain head
        are not included in the next block.
        '''
        self.update_chain_head(None)

        self.update_chain_head(
            head=self.init_chain_head,
            committed=self.batches)

        new_batches = self.make_batches(batch_count=12)

        self.receive_batches(new_batches)

        self.publish_block()

        self.verify_block(new_batches)

    def test_uncommitted_batches(self):
        '''
        Test that batches uncommitted upon updating the chain head
        are included in the next block.
        '''
        self.update_chain_head(None)

        self.update_chain_head(
            head=self.init_chain_head,
            uncommitted=self.batches)

        self.publish_block()

        self.verify_block()

    def test_empty_pending_queue(self):
        '''
        Test that no block is published if the pending queue is empty
        '''
        # try to publish with no pending queue (failing)
        self.publish_block()

        self.assert_no_block_published()

        # receive batches, then try again (succeeding)
        self.receive_batches()

        self.publish_block()

        self.verify_block()

    def test_missing_dependencies(self):
        '''
        Test that no block is published with missing dependencies
        '''
        self.batches = self.make_batches(
            missing_deps=True)

        self.receive_batches()

        self.publish_block()

        self.assert_no_block_published()

    def test_batches_rejected_by_scheduler(self):
        '''
        Test that no block is published with
        batches rejected by the scheduler
        '''
        self.publisher = BlockPublisher(
            transaction_executor=MockTransactionExecutor(
                batch_execution_result=False),
            block_cache=self.block_tree_manager.block_cache,
            state_view_factory=self.state_view_factory,
            block_sender=self.block_sender,
            batch_sender=self.batch_sender,
            squash_handler=None,
            chain_head=self.block_tree_manager.chain_head,
            identity_signing_key=self.block_tree_manager.identity_signing_key,
            data_dir=None,
            config_dir=None)

        self.receive_batches()

        self.publish_block()

        self.assert_no_block_published()

    def test_max_block_size(self):
        '''
        Test block publisher obeys the block size limits
        '''
        # Create a publisher that has a state view
        # with a batch limit
        addr, value = CreateSetting(
            'sawtooth.publisher.max_batches_per_block', 1)
        # (removed stray debug print of addr)
        self.state_view_factory = MockStateViewFactory(
            {addr: value})

        self.publisher = BlockPublisher(
            transaction_executor=MockTransactionExecutor(),
            block_cache=self.block_tree_manager.block_cache,
            state_view_factory=self.state_view_factory,
            block_sender=self.block_sender,
            batch_sender=self.batch_sender,
            squash_handler=None,
            chain_head=self.block_tree_manager.chain_head,
            identity_signing_key=self.block_tree_manager.identity_signing_key,
            data_dir=None,
            config_dir=None)

        # nothing queued yet, so nothing published
        self.assert_no_block_published()

        self.receive_batches()

        # with a one-batch limit, each publish emits exactly one batch
        for i in range(self.batch_count):
            self.publish_block()
            self.assert_block_published()
            self.update_chain_head(BlockWrapper(self.result_block))
            self.verify_block([self.batches[i]])

    def test_duplicate_transactions(self):
        '''
        Test discards batches that have duplicate transactions in them.
        '''
        # publish a block containing a single batch
        self.batches = self.batches[1:2]
        self.receive_batches()
        self.publish_block()
        self.assert_block_published()
        self.update_chain_head(BlockWrapper(self.result_block))
        self.verify_block()

        # build a new set of batches with the same transactions in them
        self.batches = self.make_batches_with_duplicate_txn()
        self.receive_batches()
        self.publish_block()
        # block should be empty after the batch
        # with the duplicate transaction is dropped.
        self.assert_no_block_published()

    # assertions

    def assert_block_published(self):
        self.assertIsNotNone(
            self.result_block,
            'Block should have been published')

    def assert_no_block_published(self):
        self.assertIsNone(
            self.result_block,
            'Block should not have been published')

    def assert_batch_in_block(self, batch):
        self.assertIn(
            batch,
            tuple(self.result_block.batches),
            'Batch not in block')

    def assert_batches_in_block(self, batches=None):
        if batches is None:
            batches = self.batches

        for batch in batches:
            self.assert_batch_in_block(batch)

    def assert_block_batch_count(self, batch_count=None):
        if batch_count is None:
            batch_count = self.batch_count

        self.assertEqual(
            len(self.result_block.batches),
            batch_count,
            'Wrong batch count in block')

    def verify_block(self, batches=None):
        if batches is None:
            batches = self.batches

        # batches is never None past this point, so the count is simply
        # its length (the original had a dead None-check here).
        batch_count = len(batches)

        self.assert_block_published()
        self.assert_batches_in_block(batches)
        self.assert_block_batch_count(batch_count)

        self.result_block = None

    # publisher functions

    def receive_batch(self, batch):
        self.publisher.on_batch_received(batch)

    def receive_batches(self, batches=None):
        if batches is None:
            batches = self.batches

        for batch in batches:
            self.receive_batch(batch)

    def publish_block(self):
        self.publisher.on_check_publish_block()
        self.result_block = self.block_sender.new_block
        self.block_sender.new_block = None

    def update_chain_head(self, head, committed=None, uncommitted=None):
        if head:
            self.block_tree_manager.block_store.update_chain([head])
        self.publisher.on_chain_updated(
            chain_head=head,
            committed_batches=committed,
            uncommitted_batches=uncommitted)

    # batches

    def make_batch(self, missing_deps=False):
        return self.block_tree_manager.generate_batch(
            missing_deps=missing_deps)

    def make_batches(self, batch_count=None, missing_deps=False):
        if batch_count is None:
            batch_count = self.batch_count

        return [self.make_batch(missing_deps=missing_deps)
                for _ in range(batch_count)]

    def make_batches_with_duplicate_txn(self):
        txns = [self.batches[0].transactions[0],
                self.block_tree_manager.generate_transaction("nonce")]
        return [self.block_tree_manager.generate_batch(txns=txns)]
class TestBlockPublisher(unittest.TestCase):
    '''
    The block publisher has five main functions, and in these
    tests those functions are given the following wrappers for
    convenience:
        * on_batch_received -> receive_batches
        * on_chain_updated -> update_chain_head
        * initialize_block -> initialize_block
        * summarize_block -> summarize_block
        * finalize_block -> finalize_block
    Additionally, the publish_block is provided to call both initialize_block
    and finalize_block.
    After finalizing a block, finalize_block sends its block
    to the mock block sender, and that block is named result_block.
    This block is what is checked by the test assertions.

    The basic pattern for the publisher tests (with variations) is:
        0) make a list of batches (usually in setUp);
        1) receive the batches;
        2) initialize a block;
        3) finalize a block;
        4) verify the block (checking that it contains the correct batches,
           or checking that it doesn't exist, etc.).
    '''

    # The patch replaces the MockBatchInjectorFactory class for the duration
    # of setUp, so the publisher is built with a factory whose
    # create_injectors() returns no injectors.
    @unittest.mock.patch('test_journal.mock.MockBatchInjectorFactory')
    def setUp(self, mock_batch_injector_factory):
        mock_batch_injector_factory.create_injectors.return_value = []
        self.block_tree_manager = BlockTreeManager()
        self.block_sender = MockBlockSender()
        self.batch_sender = MockBatchSender()
        self.state_view_factory = MockStateViewFactory({})
        self.permission_verifier = MockPermissionVerifier()
        self.publisher = BlockPublisher(
            block_manager=self.block_tree_manager.block_manager,
            transaction_executor=MockTransactionExecutor(),
            transaction_committed=(
                self.block_tree_manager.block_store.has_transaction
            ),
            batch_committed=self.block_tree_manager.block_store.has_batch,
            state_view_factory=self.state_view_factory,
            block_sender=self.block_sender,
            batch_sender=self.batch_sender,
            chain_head=self.block_tree_manager.chain_head.block,
            identity_signer=self.block_tree_manager.identity_signer,
            data_dir=None,
            config_dir=None,
            batch_observers=[],
            permission_verifier=self.permission_verifier,
            batch_injector_factory=mock_batch_injector_factory)

        self.init_chain_head = self.block_tree_manager.chain_head
        self.result_block = None

        # A list of batches is created at the beginning of each test.
        # The test assertions and the publisher function wrappers
        # take these batches as a default argument.
        self.batch_count = 8
        self.batches = self.make_batches()

    def test_publish(self):
        '''
        Publish a block with several batches
        '''
        self.receive_batches()

        self.publish_block()

        self.verify_block()

    def test_receive_after_initialize(self):
        '''
        Receive batches after initialization
        '''
        self.initialize_block()
        self.receive_batches()

        self.finalize_block()

        self.verify_block()

    def test_summarize_block(self):
        '''
        Initialize a block and summarize it
        '''
        self.receive_batches()
        self.initialize_block()

        self.assertIsNotNone(self.summarize_block(),
                             'Expected block summary')

    def test_reject_double_initialization(self):
        '''
        Test that you can't initialize a candidate block twice
        '''
        self.initialize_block()

        with self.assertRaises(
                BlockInProgress,
                msg='Second initialization should have rejected'):
            self.initialize_block()

    def test_reject_finalize_without_initialize(self):
        '''
        Test that no block is published if the block hasn't been initialized
        '''
        self.receive_batches()

        with self.assertRaises(
                BlockNotInitialized,
                msg='Block should not be finalized'):
            self.finalize_block()

    def test_reject_duplicate_batches_from_receive(self):
        '''
        Test that duplicate batches from on_batch_received are rejected
        '''
        # Receiving the same batches repeatedly must not duplicate them
        # in the published block.
        for _ in range(5):
            self.receive_batches()

        self.publish_block()

        self.verify_block()

    def test_reject_duplicate_batches_from_store(self):
        '''
        Test that duplicate batches from block store are rejected
        '''
        self.update_chain_head(
            head=self.init_chain_head,
            uncommitted=self.batches)

        self.receive_batches()

        self.publish_block()

        self.verify_block()

    def test_committed_batches(self):
        '''
        Test that batches committed upon updating the chain head
        are not included in the next block.
        '''
        self.update_chain_head(
            head=self.init_chain_head,
            committed=self.batches)

        new_batches = self.make_batches(batch_count=12)
        self.receive_batches(new_batches)

        self.publish_block()

        # Only the new batches should appear; the committed ones are skipped.
        self.verify_block(new_batches)

    def test_uncommitted_batches(self):
        '''
        Test that batches uncommitted upon updating the chain head
        are included in the next block.
        '''
        self.update_chain_head(
            head=self.init_chain_head,
            uncommitted=self.batches)

        self.publish_block()

        self.verify_block()

    def test_empty_pending_queue(self):
        '''
        Test that no block is published if the pending queue is empty
        '''
        # try to publish with no pending queue (failing)
        with self.assertRaises(
                BlockEmpty, msg='Block should not be published'):
            self.publish_block()

        self.assert_no_block_published()

        # receive batches, then try again (succeeding); the candidate block
        # is still initialized, so only finalize is needed
        self.receive_batches()
        self.finalize_block()

        self.verify_block()

    def test_missing_dependencies(self):
        '''
        Test that no block is published with missing dependencies
        '''
        self.batches = self.make_batches(
            missing_deps=True)

        self.receive_batches()

        # Block should be empty, since batches with missing deps aren't added
        with self.assertRaises(BlockEmpty, msg='Block should be empty'):
            self.publish_block()

        self.assert_no_block_published()

    @unittest.mock.patch('test_journal.mock.MockBatchInjectorFactory')
    def test_batches_rejected_by_scheduler(self,
                                           mock_batch_injector_factory):
        '''
        Test that no block is published with
        batches rejected by the scheduler
        '''
        mock_batch_injector_factory.create_injectors.return_value = []
        # Rebuild the publisher with an executor that reports every batch
        # as failing execution.
        self.publisher = BlockPublisher(
            block_manager=self.block_tree_manager.block_manager,
            transaction_executor=MockTransactionExecutor(
                batch_execution_result=False),
            transaction_committed=(
                self.block_tree_manager.block_store.has_transaction
            ),
            batch_committed=self.block_tree_manager.block_store.has_batch,
            state_view_factory=self.state_view_factory,
            block_sender=self.block_sender,
            batch_sender=self.batch_sender,
            chain_head=self.block_tree_manager.chain_head.block,
            identity_signer=self.block_tree_manager.identity_signer,
            data_dir=None,
            config_dir=None,
            batch_observers=[],
            permission_verifier=self.permission_verifier,
            batch_injector_factory=mock_batch_injector_factory)

        self.receive_batches()

        # Block should be empty since all batches are rejected
        with self.assertRaises(BlockEmpty, msg='Block should be empty'):
            self.publish_block()

        self.assert_no_block_published()

    @unittest.mock.patch('test_journal.mock.MockBatchInjectorFactory')
    def test_max_block_size(self, mock_batch_injector_factory):
        '''
        Test block publisher obeys the block size limits
        '''
        mock_batch_injector_factory.create_injectors.return_value = []
        # Create a publisher that has a state view
        # with a batch limit
        addr, value = CreateSetting(
            'sawtooth.publisher.max_batches_per_block', 1)
        self.state_view_factory = MockStateViewFactory(
            {addr: value})
        self.publisher = BlockPublisher(
            block_manager=self.block_tree_manager.block_manager,
            transaction_executor=MockTransactionExecutor(),
            transaction_committed=(
                self.block_tree_manager.block_store.has_transaction
            ),
            batch_committed=self.block_tree_manager.block_store.has_batch,
            state_view_factory=self.state_view_factory,
            block_sender=self.block_sender,
            batch_sender=self.batch_sender,
            chain_head=self.block_tree_manager.chain_head.block,
            identity_signer=self.block_tree_manager.identity_signer,
            data_dir=None,
            config_dir=None,
            batch_observers=[],
            permission_verifier=self.permission_verifier,
            batch_injector_factory=mock_batch_injector_factory)

        # Nothing has been published yet.
        self.assert_no_block_published()

        # Queue up all of the batches at once.
        self.receive_batches()

        # With max_batches_per_block == 1, each publish round should emit
        # one block containing exactly one batch, in receive order.
        for i in range(self.batch_count):
            self.publish_block()
            self.assert_block_published()
            self.update_chain_head(BlockWrapper(self.result_block))
            self.verify_block([self.batches[i]])

    def test_duplicate_transactions(self):
        '''
        Test discards batches that have duplicate transactions in them.
        '''
        # publish a first block containing a single batch
        self.batches = self.batches[1:2]
        self.receive_batches()
        self.publish_block()
        self.assert_block_published()
        self.update_chain_head(BlockWrapper(self.result_block))
        self.verify_block()

        # build a new set of batches with the same transactions in them
        self.batches = self.make_batches_with_duplicate_txn()
        self.receive_batches()
        with self.assertRaises(BlockEmpty, msg='Block should be empty'):
            self.publish_block()
        self.assert_no_block_published()  # block should be empty after batch
        # with duplicate transaction is dropped.

    def test_batch_injection_start_block(self):
        '''
        Test that the batch is injected at the beginning of the block.
        '''
        injected_batch = self.make_batch()

        # NOTE(review): only membership of injected_batch is asserted below,
        # not its position at the start of the block — confirm whether the
        # injector guarantees ordering.
        self.publisher = BlockPublisher(
            block_manager=self.block_tree_manager.block_manager,
            transaction_executor=MockTransactionExecutor(),
            transaction_committed=(
                self.block_tree_manager.block_store.has_transaction
            ),
            batch_committed=self.block_tree_manager.block_store.has_batch,
            state_view_factory=self.state_view_factory,
            block_sender=self.block_sender,
            batch_sender=self.batch_sender,
            chain_head=self.block_tree_manager.chain_head.block,
            identity_signer=self.block_tree_manager.identity_signer,
            data_dir=None,
            config_dir=None,
            permission_verifier=self.permission_verifier,
            batch_observers=[],
            batch_injector_factory=MockBatchInjectorFactory(injected_batch))

        self.receive_batches()

        self.publish_block()

        self.assert_batch_in_block(injected_batch)

    @unittest.mock.patch('test_journal.mock.MockBatchInjectorFactory')
    def test_validation_rules_reject_batches(self,
                                             mock_batch_injector_factory):
        """Test that a batch is not added to the block if it will violate the
        block validation rules.

        It does the following:
        - Sets the block_validation_rules to limit the number of 'test'
          transactions to 1
        - creates two batches, limited to 1 transaction each, and receives
          them
        - verifies that only the first batch was committed to the block
        """
        addr, value = CreateSetting(
            'sawtooth.validator.block_validation_rules', 'NofX:1,test')
        self.state_view_factory = MockStateViewFactory(
            {addr: value})
        mock_batch_injector_factory.create_injectors.return_value = []

        batch1 = self.make_batch(txn_count=1)
        batch2 = self.make_batch(txn_count=1)

        self.publisher = BlockPublisher(
            block_manager=self.block_tree_manager.block_manager,
            transaction_executor=MockTransactionExecutor(),
            transaction_committed=(
                self.block_tree_manager.block_store.has_transaction
            ),
            batch_committed=self.block_tree_manager.block_store.has_batch,
            state_view_factory=self.state_view_factory,
            block_sender=self.block_sender,
            batch_sender=self.batch_sender,
            chain_head=self.block_tree_manager.chain_head.block,
            identity_signer=self.block_tree_manager.identity_signer,
            data_dir=None,
            config_dir=None,
            batch_observers=[],
            permission_verifier=self.permission_verifier,
            batch_injector_factory=mock_batch_injector_factory)

        self.receive_batches(batches=[batch1, batch2])

        self.publish_block()

        self.assert_block_batch_count(1)
        self.assert_batch_in_block(batch1)

    # assertions
    def assert_block_published(self):
        """Fail unless a block was published."""
        self.assertIsNotNone(
            self.result_block,
            'Block should have been published')

    def assert_no_block_published(self):
        """Fail if a block was published."""
        self.assertIsNone(
            self.result_block,
            'Block should not have been published')

    def assert_batch_in_block(self, batch):
        """Fail unless batch is in the published block."""
        self.assertIn(
            batch,
            tuple(self.result_block.batches),
            'Batch not in block')

    def assert_batches_in_block(self, batches=None):
        """Fail unless every batch (default: self.batches) is in the block."""
        if batches is None:
            batches = self.batches
        for batch in batches:
            self.assert_batch_in_block(batch)

    def assert_block_batch_count(self, batch_count=None):
        """Fail unless the block holds exactly batch_count batches
        (default: self.batch_count)."""
        if batch_count is None:
            batch_count = self.batch_count
        self.assertEqual(
            len(self.result_block.batches),
            batch_count,
            'Wrong batch count in block')

    def verify_block(self, batches=None):
        """Assert a block was published containing exactly these batches,
        then clear result_block for the next publish."""
        if batches is None:
            batches = self.batches
        batch_count = None if batches is None else len(batches)
        self.assert_block_published()
        self.assert_batches_in_block(batches)
        self.assert_block_batch_count(batch_count)
        self.result_block = None

    # publisher functions
    def receive_batch(self, batch):
        """Hand one batch to the publisher."""
        self.publisher.on_batch_received(batch)

    def receive_batches(self, batches=None):
        """Hand each batch (default: self.batches) to the publisher."""
        if batches is None:
            batches = self.batches
        for batch in batches:
            self.receive_batch(batch)

    def initialize_block(self):
        """Start a candidate block on the current chain head."""
        self.publisher.initialize_block(self.block_tree_manager.chain_head)

    def summarize_block(self):
        """Return the candidate block's summary."""
        return self.publisher.summarize_block()

    def finalize_block(self):
        """Finalize the candidate block, then capture and clear whatever
        block the mock sender received (None when nothing was sent)."""
        self.publisher.finalize_block("")
        self.result_block = self.block_sender.new_block
        self.block_sender.new_block = None

    def publish_block(self):
        """Convenience: initialize_block followed by finalize_block."""
        self.initialize_block()
        self.finalize_block()

    def update_chain_head(self, head, committed=None, uncommitted=None):
        """Advance the block store to head (when given) and notify the
        publisher of the chain update."""
        if head:
            self.block_tree_manager.block_store.update_chain([head])
        self.publisher.on_chain_updated(
            chain_head=head,
            committed_batches=committed,
            uncommitted_batches=uncommitted)

    # batches
    def make_batch(self, missing_deps=False, txn_count=2):
        """Generate one batch with txn_count transactions."""
        return self.block_tree_manager.generate_batch(
            txn_count=txn_count,
            missing_deps=missing_deps)

    def make_batches(self, batch_count=None, missing_deps=False):
        """Generate batch_count batches (default: self.batch_count)."""
        if batch_count is None:
            batch_count = self.batch_count
        return [self.make_batch(missing_deps=missing_deps)
                for _ in range(batch_count)]

    def make_batches_with_duplicate_txn(self):
        """Build one batch pairing an already-used transaction from
        self.batches[0] with a freshly generated one."""
        txns = [self.batches[0].transactions[0],
                self.block_tree_manager.generate_transaction("nonce")]
        return [self.block_tree_manager.generate_batch(txns=txns)]