def setUp(self):
    """Build a ChainController over an empty block store (no genesis block).

    Because the BlockTreeManager is created with ``with_genesis=False``,
    the controller must start with no chain head.
    """
    self.block_tree_manager = BlockTreeManager(with_genesis=False)
    self.gossip = MockNetwork()
    self.executor = SynchronousExecutor()
    self.txn_executor = MockTransactionExecutor()
    self.block_sender = MockBlockSender()
    self.chain_id_manager = MockChainIdManager()
    self.state_delta_processor = MockStateDeltaProcessor()
    self.chain_head_lock = RLock()

    # No-op callback with the signature ChainController expects.
    def chain_updated(head, committed_batches=None,
                      uncommitted_batches=None):
        pass

    state_view_factory = MockStateViewFactory(
        self.block_tree_manager.state_db)

    self.chain_ctrl = ChainController(
        block_cache=self.block_tree_manager.block_cache,
        state_view_factory=state_view_factory,
        block_sender=self.block_sender,
        executor=self.executor,
        transaction_executor=MockTransactionExecutor(),
        chain_head_lock=self.chain_head_lock,
        on_chain_updated=chain_updated,
        squash_handler=None,
        chain_id_manager=self.chain_id_manager,
        state_delta_processor=self.state_delta_processor,
        identity_signing_key=self.block_tree_manager.identity_signing_key,
        data_dir=None,
        config_dir=None)

    # Sanity check: without a genesis block there is no chain head yet.
    self.assertIsNone(self.chain_ctrl.chain_head)
def setUp(self):
    """Build a ChainController around mocks and extend its chain by 5 blocks.

    Leaves ``self.init_head`` pointing at the head of the 5-block chain so
    individual tests start from a non-trivial fork point.
    """
    self.block_tree_manager = BlockTreeManager()
    self.gossip = MockNetwork()
    self.executor = SynchronousExecutor()
    self.txn_executor = MockTransactionExecutor()
    self.block_sender = MockBlockSender()
    self.chain_id_manager = MockChainIdManager()

    # No-op callback with the signature ChainController expects.
    def chain_updated(head, committed_batches=None,
                      uncommitted_batches=None):
        pass

    state_view_factory = MockStateViewFactory(
        self.block_tree_manager.state_db)

    self.chain_ctrl = ChainController(
        block_cache=self.block_tree_manager.block_cache,
        state_view_factory=state_view_factory,
        block_sender=self.block_sender,
        executor=self.executor,
        transaction_executor=MockTransactionExecutor(),
        on_chain_updated=chain_updated,
        squash_handler=None,
        chain_id_manager=self.chain_id_manager,
        identity_signing_key=self.block_tree_manager.identity_signing_key,
        data_dir=None)

    init_root = self.chain_ctrl.chain_head
    self.assert_is_chain_head(init_root)

    # create a chain of length 5 extending the root
    _, head = self.generate_chain(init_root, 5)
    self.receive_and_process_blocks(head)
    self.assert_is_chain_head(head)

    self.init_head = head
def __init__(self):
    """Assemble a ChainController test harness and seed a 5-block chain.

    Wires a real BlockValidator over mock collaborators, then extends the
    genesis chain by five blocks so ``self.init_head`` is a usable fork
    point for tests.
    """
    self.block_tree_manager = BlockTreeManager()
    self.gossip = MockNetwork()
    self.txn_executor = MockTransactionExecutor()
    self.block_sender = MockBlockSender()
    self.chain_id_manager = MockChainIdManager()
    self._chain_head_lock = RLock()
    self.permission_verifier = MockPermissionVerifier()
    self.state_view_factory = MockStateViewFactory(
        self.block_tree_manager.state_db)
    # batch_execution_result=None makes the mock executor report no result.
    self.transaction_executor = MockTransactionExecutor(
        batch_execution_result=None)
    self.executor = SynchronousExecutor()

    self.block_validator = BlockValidator(
        state_view_factory=self.state_view_factory,
        block_cache=self.block_tree_manager.block_cache,
        transaction_executor=self.transaction_executor,
        squash_handler=None,
        identity_signer=self.block_tree_manager.identity_signer,
        data_dir=None,
        config_dir=None,
        permission_verifier=self.permission_verifier,
        thread_pool=self.executor)

    # No-op callback with the signature ChainController expects.
    def chain_updated(head, committed_batches=None,
                      uncommitted_batches=None):
        pass

    self.chain_ctrl = ChainController(
        block_cache=self.block_tree_manager.block_cache,
        block_validator=self.block_validator,
        state_view_factory=self.state_view_factory,
        chain_head_lock=self._chain_head_lock,
        on_chain_updated=chain_updated,
        chain_id_manager=self.chain_id_manager,
        data_dir=None,
        config_dir=None,
        chain_observers=[])

    init_root = self.chain_ctrl.chain_head
    self.assert_is_chain_head(init_root)

    # create a chain of length 5 extending the root
    _, head = self.generate_chain(init_root, 5)
    self.receive_and_process_blocks(head)
    self.assert_is_chain_head(head)

    self.init_head = head
def setUp(self):
    """Create a ChainController with no genesis block.

    The empty block store means the controller's chain head starts out
    as ``None``, which the final assertion verifies.
    """
    self.block_tree_manager = BlockTreeManager(with_genesis=False)
    self.gossip = MockNetwork()
    self.executor = SynchronousExecutor()
    self.txn_executor = MockTransactionExecutor()
    self.block_sender = MockBlockSender()
    self.chain_id_manager = MockChainIdManager()
    self.state_delta_processor = MockStateDeltaProcessor()
    self.chain_head_lock = RLock()

    # Callback stub matching the on_chain_updated signature.
    def chain_updated(head, committed_batches=None,
                      uncommitted_batches=None):
        pass

    self.chain_ctrl = ChainController(
        block_cache=self.block_tree_manager.block_cache,
        state_view_factory=MockStateViewFactory(
            self.block_tree_manager.state_db),
        block_sender=self.block_sender,
        executor=self.executor,
        transaction_executor=MockTransactionExecutor(),
        chain_head_lock=self.chain_head_lock,
        on_chain_updated=chain_updated,
        squash_handler=None,
        chain_id_manager=self.chain_id_manager,
        state_delta_processor=self.state_delta_processor,
        identity_signing_key=self.block_tree_manager.identity_signing_key,
        data_dir=None,
        config_dir=None)

    self.assertIsNone(self.chain_ctrl.chain_head)
def _init_subprocesses(self):
    """Create the block publisher, chain controller, and their threads.

    Instances are stored on ``self``; the threads are constructed here
    but not started.
    """
    self._block_publisher = BlockPublisher(
        transaction_executor=self._transaction_executor,
        block_cache=self._block_cache,
        state_view_factory=self._state_view_factory,
        block_sender=self._block_sender,
        squash_handler=self._squash_handler,
        chain_head=self._block_store.chain_head,
        identity_signing_key=self._identity_signing_key)

    self._publisher_thread = self._PublisherThread(
        self._block_publisher, self._batch_queue)

    self._chain_controller = ChainController(
        block_sender=self._block_sender,
        block_cache=self._block_cache,
        state_view_factory=self._state_view_factory,
        # Single worker: block processing is serialized on one thread.
        executor=ThreadPoolExecutor(1),
        transaction_executor=self._transaction_executor,
        on_chain_updated=self._block_publisher.on_chain_updated,
        squash_handler=self._squash_handler,
        chain_id_manager=self._chain_id_manager)

    self._chain_thread = self._ChainThread(
        self._chain_controller, self._block_queue, self._block_cache)
def _init_subprocesses(self):
    """Wire up the journal's publisher and chain-controller subcomponents.

    Builds the BlockPublisher, its publisher thread, the ChainController
    (sharing the publisher's chain-head lock and update callback), and
    the chain thread. Threads are created, not started.
    """
    self._block_publisher = BlockPublisher(
        transaction_executor=self._transaction_executor,
        block_cache=self._block_cache,
        state_view_factory=self._state_view_factory,
        block_sender=self._block_sender,
        batch_sender=self._batch_sender,
        squash_handler=self._squash_handler,
        chain_head=self._block_store.chain_head,
        identity_signing_key=self._identity_signing_key,
        data_dir=self._data_dir,
        config_dir=self._config_dir)

    self._publisher_thread = self._PublisherThread(
        block_publisher=self._block_publisher,
        batch_queue=self._batch_queue,
        check_publish_block_frequency=self._check_publish_block_frequency)

    self._chain_controller = ChainController(
        block_sender=self._block_sender,
        block_cache=self._block_cache,
        state_view_factory=self._state_view_factory,
        executor=self._executor_threadpool,
        transaction_executor=self._transaction_executor,
        # The controller serializes head updates through the publisher's
        # lock and notifies the publisher on every head change.
        chain_head_lock=self._block_publisher.chain_head_lock,
        on_chain_updated=self._block_publisher.on_chain_updated,
        squash_handler=self._squash_handler,
        chain_id_manager=self._chain_id_manager,
        state_delta_processor=self._state_delta_processor,
        identity_signing_key=self._identity_signing_key,
        data_dir=self._data_dir,
        config_dir=self._config_dir)

    self._chain_thread = self._ChainThread(
        chain_controller=self._chain_controller,
        block_queue=self._block_queue,
        block_cache=self._block_cache,
        block_cache_purge_frequency=self._block_cache_purge_frequency)
def setUp(self):
    """Construct a ChainController using the legacy block_store API."""
    self.blocks = BlockTreeManager()
    self.gossip = MockNetwork()
    self.executor = SynchronousExecutor()
    self.txn_executor = MockTransactionExecutor()

    # Older callback shape: only the new head is passed.
    def chain_updated(head):
        pass

    self.chain_ctrl = ChainController(
        consensus=TestModeVerifier(),
        block_store=self.blocks.block_store,
        send_message=self.gossip.send_message,
        executor=self.executor,
        transaction_executor=MockTransactionExecutor(),
        on_chain_updated=chain_updated,
        squash_handler=None)
def setUp(self):
    """Construct a ChainController over a block cache with a mock sender."""
    self.blocks = BlockTreeManager()
    self.gossip = MockNetwork()
    self.executor = SynchronousExecutor()
    self.txn_executor = MockTransactionExecutor()
    self.block_sender = MockBlockSender()

    # No-op callback; batches arguments are accepted but ignored.
    def chain_updated(head, committed_batches=None,
                      uncommitted_batches=None):
        pass

    self.chain_ctrl = ChainController(
        consensus=TestModeVerifier(),
        block_cache=self.blocks.block_cache,
        block_sender=self.block_sender,
        executor=self.executor,
        transaction_executor=MockTransactionExecutor(),
        on_chain_updated=chain_updated,
        squash_handler=None)
def setUp(self):
    """Construct a ChainController driven by the mock consensus module."""
    self.blocks = BlockTreeManager()
    self.gossip = MockNetwork()
    self.executor = SynchronousExecutor()
    self.txn_executor = MockTransactionExecutor()
    self.block_sender = MockBlockSender()
    self.state_view_factory = MockStateViewFactory()

    # No-op callback; batches arguments are accepted but ignored.
    def chain_updated(head, committed_batches=None,
                      uncommitted_batches=None):
        pass

    self.chain_ctrl = ChainController(
        consensus_module=mock_consensus,
        block_cache=self.blocks.block_cache,
        state_view_factory=self.state_view_factory,
        block_sender=self.block_sender,
        executor=self.executor,
        transaction_executor=MockTransactionExecutor(),
        on_chain_updated=chain_updated,
        squash_handler=None)
class TestChainController(unittest.TestCase):
    """Exercises ChainController fork resolution with mocked collaborators."""

    def setUp(self):
        self.blocks = BlockTreeManager()
        self.gossip = MockNetwork()
        self.executor = SynchronousExecutor()
        self.txn_executor = MockTransactionExecutor()
        self.block_sender = MockBlockSender()
        self.state_view_factory = MockStateViewFactory()

        # No-op callback with the signature ChainController expects.
        def chain_updated(head, committed_batches=None,
                          uncommitted_batches=None):
            pass

        self.chain_ctrl = ChainController(
            consensus_module=mock_consensus,
            block_cache=self.blocks.block_cache,
            state_view_factory=self.state_view_factory,
            block_sender=self.block_sender,
            executor=self.executor,
            transaction_executor=MockTransactionExecutor(),
            on_chain_updated=chain_updated,
            squash_handler=None)

    def test_simple_case(self):
        # TEST Run the simple case: a single block extending the current
        # head becomes the new head.
        block_1 = self.blocks.generate_block(self.blocks.chain_head)
        self.chain_ctrl.on_block_received(block_1)
        self.executor.process_all()
        # Was a bare assert(); assertEqual reports both values on failure
        # and is not stripped when Python runs with -O.
        self.assertEqual(
            self.chain_ctrl.chain_head.block.header_signature,
            block_1.header_signature)

    def test_alternate_genesis(self):
        # TEST Run generate and alternate genesis block: a chain rooted at
        # a different genesis must not displace the current head.
        head = self.chain_ctrl.chain_head
        for b in self.blocks.generate_chain(None, 5, {"add_to_cache": True}):
            self.chain_ctrl.on_block_received(b)
        self.executor.process_all()
        self.assertEqual(
            self.chain_ctrl.chain_head.block.header_signature,
            head.block.header_signature)

    def test_bad_block_signature(self):
        # TEST Bad block extending current chain
        # Bad due to signature: the head must be unchanged.
        head = self.blocks.chain_head
        block_bad = self.blocks.generate_block(
            self.blocks.chain_head.block, invalid_signature=True)
        self.chain_ctrl.on_block_received(block_bad)
        # NOTE(review): unlike the other tests, no
        # self.executor.process_all() before asserting — confirm whether
        # the bad block is meant to be rejected before scheduling.
        self.assertEqual(
            self.chain_ctrl.chain_head.block.header_signature,
            head.block.header_signature)

    def test_bad_block_consensus(self):
        # Bad due to consensus — not yet implemented.
        pass

    def test_bad_block_transaction(self):
        # Bad due to transaction — not yet implemented.
        pass
class TestChainController(unittest.TestCase):
    """Exercises ChainController fork resolution with TestModeVerifier."""

    def setUp(self):
        self.blocks = BlockTreeManager()
        self.gossip = MockNetwork()
        self.executor = SynchronousExecutor()
        self.txn_executor = MockTransactionExecutor()
        self.block_sender = MockBlockSender()

        # Older callback shape: only the new head is passed.
        def chain_updated(head):
            pass

        self.chain_ctrl = ChainController(
            consensus=TestModeVerifier(),
            block_cache=self.blocks.block_cache,
            block_sender=self.block_sender,
            executor=self.executor,
            transaction_executor=MockTransactionExecutor(),
            on_chain_updated=chain_updated,
            squash_handler=None)

    def test_simple_case(self):
        # TEST Run the simple case: a single block extending the current
        # head becomes the new head.
        block_1 = self.blocks.generate_block(self.blocks.chain_head)
        self.chain_ctrl.on_block_received(block_1)
        self.executor.process_all()
        # Was a bare assert(); assertEqual reports both values on failure
        # and is not stripped when Python runs with -O.
        self.assertEqual(
            self.chain_ctrl.chain_head.block.header_signature,
            block_1.header_signature)

    def test_alternate_genesis(self):
        # TEST Run generate and alternate genesis block: a chain built on
        # a different genesis must not displace the current head.
        head = self.chain_ctrl.chain_head
        other_genesis = self.blocks.generate_block(add_to_store=True)
        for b in self.blocks.generate_chain(other_genesis, 5):
            self.chain_ctrl.on_block_received(b)
        self.executor.process_all()
        self.assertEqual(
            self.chain_ctrl.chain_head.block.header_signature,
            head.block.header_signature)

    def test_bad_block_signature(self):
        # TEST Bad block extending current chain
        # Bad due to signature: the head must be unchanged.
        head = self.blocks.chain_head
        block_bad = self.blocks.generate_block(
            self.blocks.chain_head.block, invalid_signature=True)
        self.chain_ctrl.on_block_received(block_bad)
        # NOTE(review): unlike the other tests, no
        # self.executor.process_all() before asserting — confirm whether
        # the bad block is meant to be rejected before scheduling.
        self.assertEqual(
            self.chain_ctrl.chain_head.block.header_signature,
            head.block.header_signature)

    def test_bad_block_consensus(self):
        # Bad due to consensus — not yet implemented.
        pass

    def test_bad_block_transaction(self):
        # Bad due to transaction — not yet implemented.
        pass
class TestChainController(unittest.TestCase):
    """Exercises ChainController fork resolution via the block_store API."""

    def setUp(self):
        self.blocks = BlockTreeManager()
        self.gossip = MockNetwork()
        self.executor = SynchronousExecutor()
        self.txn_executor = MockTransactionExecutor()
        self.block_sender = MockBlockSender()

        # Older callback shape: only the new head is passed.
        def chain_updated(head):
            pass

        self.chain_ctrl = ChainController(
            consensus=TestModeVerifier(),
            block_store=self.blocks.block_store,
            block_sender=self.block_sender,
            executor=self.executor,
            transaction_executor=MockTransactionExecutor(),
            on_chain_updated=chain_updated,
            squash_handler=None)

    def test_simple_case(self):
        # TEST Run the simple case: a single block extending the current
        # head becomes the new head.
        block_1 = self.blocks.generate_block(self.blocks.chain_head)
        self.chain_ctrl.on_block_received(block_1.get_block())
        self.executor.process_all()
        # Was a bare assert(); assertEqual reports both values on failure
        # and is not stripped when Python runs with -O.
        self.assertEqual(
            self.chain_ctrl.chain_head.block.header_signature,
            block_1.header_signature)

    def test_alternate_genesis(self):
        # TEST Run generate and alternate genesis block: a chain built on
        # a different genesis must not displace the current head.
        head = self.chain_ctrl.chain_head
        other_genesis = self.blocks.generate_block(add_to_store=True)
        for b in self.blocks.generate_chain(other_genesis, 5):
            self.chain_ctrl.on_block_received(b.get_block())
        self.executor.process_all()
        self.assertEqual(
            self.chain_ctrl.chain_head.block.header_signature,
            head.block.header_signature)

    def test_bad_block_signature(self):
        # TEST Bad block extending current chain
        # Bad due to signature: the head must be unchanged.
        head = self.blocks.chain_head
        block_bad = self.blocks.generate_block(
            self.blocks.chain_head.block, invalid_signature=True)
        self.chain_ctrl.on_block_received(block_bad.get_block())
        # NOTE(review): unlike the other tests, no
        # self.executor.process_all() before asserting — confirm whether
        # the bad block is meant to be rejected before scheduling.
        self.assertEqual(
            self.chain_ctrl.chain_head.block.header_signature,
            head.block.header_signature)

    def test_bad_block_consensus(self):
        # Bad due to consensus — not yet implemented.
        pass

    def test_bad_block_transaction(self):
        # Bad due to transaction — not yet implemented.
        pass
def _init_subprocesses(self):
    """Build the journal's publisher/validator/controller and their threads.

    Creates the batch injector factory, the BlockPublisher and its thread,
    the ChainController (sharing the publisher's chain-head lock), and the
    chain thread. Threads are constructed but not started here.
    """
    batch_injector_factory = DefaultBatchInjectorFactory(
        block_store=self._block_store,
        state_view_factory=self._state_view_factory,
        signing_key=self._identity_signing_key)

    self._block_publisher = BlockPublisher(
        transaction_executor=self._transaction_executor,
        block_cache=self._block_cache,
        state_view_factory=self._state_view_factory,
        block_sender=self._block_sender,
        batch_sender=self._batch_sender,
        squash_handler=self._squash_handler,
        chain_head=self._block_store.chain_head,
        identity_signing_key=self._identity_signing_key,
        data_dir=self._data_dir,
        config_dir=self._config_dir,
        permission_verifier=self._permission_verifier,
        batch_injector_factory=batch_injector_factory)

    self._publisher_thread = self._PublisherThread(
        block_publisher=self._block_publisher,
        batch_queue=self._batch_queue,
        check_publish_block_frequency=self._check_publish_block_frequency)

    self._chain_controller = ChainController(
        block_sender=self._block_sender,
        block_cache=self._block_cache,
        state_view_factory=self._state_view_factory,
        executor=self._executor_threadpool,
        transaction_executor=self._transaction_executor,
        # Head updates are serialized through the publisher's lock and
        # reported back to the publisher.
        chain_head_lock=self._block_publisher.chain_head_lock,
        on_chain_updated=self._block_publisher.on_chain_updated,
        squash_handler=self._squash_handler,
        chain_id_manager=self._chain_id_manager,
        identity_signing_key=self._identity_signing_key,
        data_dir=self._data_dir,
        config_dir=self._config_dir,
        permission_verifier=self._permission_verifier,
        chain_observers=self._chain_observers)

    self._chain_thread = self._ChainThread(
        chain_controller=self._chain_controller,
        block_queue=self._block_queue,
        block_cache=self._block_cache)
def setUp(self):
    """Construct a ChainController with TestModeVerifier over a block cache."""
    self.blocks = BlockTreeManager()
    self.gossip = MockNetwork()
    self.executor = SynchronousExecutor()
    self.txn_executor = MockTransactionExecutor()
    self.block_sender = MockBlockSender()

    # Older callback shape: only the new head is passed.
    def chain_updated(head):
        pass

    self.chain_ctrl = ChainController(
        consensus=TestModeVerifier(),
        block_cache=self.blocks.block_cache,
        block_sender=self.block_sender,
        executor=self.executor,
        transaction_executor=MockTransactionExecutor(),
        on_chain_updated=chain_updated,
        squash_handler=None)
def __init__(self):
    """Assemble a genesis-less ChainController harness.

    The BlockTreeManager is created with ``with_genesis=False``, so the
    controller must start with no chain head.

    NOTE(review): this helper calls ``self.assertIsNone`` — presumably it
    runs in a TestCase context; confirm against the caller.
    """
    self.block_tree_manager = BlockTreeManager(with_genesis=False)
    self.gossip = MockNetwork()
    self.txn_executor = MockTransactionExecutor()
    self.block_sender = MockBlockSender()
    self.chain_id_manager = MockChainIdManager()
    self.chain_head_lock = RLock()
    self.permission_verifier = MockPermissionVerifier()
    self.state_view_factory = MockStateViewFactory(
        self.block_tree_manager.state_db)
    # batch_execution_result=None makes the mock executor report no result.
    self.transaction_executor = MockTransactionExecutor(
        batch_execution_result=None)
    self.executor = SynchronousExecutor()

    self.block_validator = BlockValidator(
        state_view_factory=self.state_view_factory,
        block_cache=self.block_tree_manager.block_cache,
        transaction_executor=self.transaction_executor,
        squash_handler=None,
        identity_signer=self.block_tree_manager.identity_signer,
        data_dir=None,
        config_dir=None,
        permission_verifier=self.permission_verifier,
        thread_pool=self.executor)

    # No-op callback with the signature ChainController expects.
    def chain_updated(head, committed_batches=None,
                      uncommitted_batches=None):
        pass

    self.chain_ctrl = ChainController(
        block_cache=self.block_tree_manager.block_cache,
        block_validator=self.block_validator,
        state_view_factory=self.state_view_factory,
        chain_head_lock=self.chain_head_lock,
        on_chain_updated=chain_updated,
        chain_id_manager=self.chain_id_manager,
        data_dir=None,
        config_dir=None,
        chain_observers=[])

    # Without a genesis block there is no chain head yet.
    self.assertIsNone(self.chain_ctrl.chain_head)
def __init__(self, consensus, block_store, send_message,
             transaction_executor, squash_handler, first_state_root):
    """Set up the journal's publisher and chain controller.

    If the block store has no chain head yet, a genesis block is created
    inline (see the HACK note below), committed to the store, and made the
    chain head before the ChainController is constructed.
    """
    self._consensus = consensus
    self._block_store = block_store
    self._send_message = send_message
    self._squash_handler = squash_handler

    self._block_publisher = BlockPublisher(
        consensus=consensus.BlockPublisher(),
        transaction_executor=transaction_executor,
        send_message=send_message,
        squash_handler=squash_handler)
    self._batch_queue = queue.Queue()
    self._publisher_thread = self._PublisherThread(
        self._block_publisher, self._batch_queue)

    # HACK until genesis tool is working: self-generate a genesis block
    # when the store has no chain head.
    if "chain_head_id" not in self._block_store:
        genesis_block = BlockState(
            block_wrapper=self._block_publisher.generate_genesis_block(),
            weight=0,
            status=BlockStatus.Valid)
        genesis_block.block.set_state_hash(first_state_root)
        self._block_store[genesis_block.block.header_signature] = \
            genesis_block
        self._block_store["chain_head_id"] = \
            genesis_block.block.header_signature
        self._block_publisher.on_chain_updated(genesis_block.block)
        LOGGER.info("Journal created genesis block: %s",
                    genesis_block.block.header_signature)

    self._chain_controller = ChainController(
        consensus=consensus.BlockVerifier(),
        block_store=block_store,
        send_message=send_message,
        executor=ThreadPoolExecutor(1),
        transaction_executor=transaction_executor,
        on_chain_updated=self._block_publisher.on_chain_updated,
        squash_handler=self._squash_handler)
    self._block_queue = queue.Queue()
    self._chain_thread = self._ChainThread(
        self._chain_controller, self._block_queue)
def _init_subprocesses(self):
    """Create publisher/controller subcomponents from the consensus module.

    The consensus object supplies both the BlockPublisher strategy and the
    BlockVerifier used by the ChainController.
    """
    self._block_publisher = BlockPublisher(
        consensus=self._consensus.BlockPublisher(),
        transaction_executor=self._transaction_executor,
        block_sender=self._block_sender,
        squash_handler=self._squash_handler,
        chain_head=self._block_store.chain_head)

    self._publisher_thread = self._PublisherThread(
        self._block_publisher, self._batch_queue)

    self._chain_controller = ChainController(
        consensus=self._consensus.BlockVerifier(),
        block_sender=self._block_sender,
        block_cache=self._block_cache,
        # Single worker: block processing is serialized on one thread.
        executor=ThreadPoolExecutor(1),
        transaction_executor=self._transaction_executor,
        on_chain_updated=self._block_publisher.on_chain_updated,
        squash_handler=self._squash_handler)

    self._chain_thread = self._ChainThread(
        self._chain_controller, self._block_queue, self._block_cache)
def setUp(self):
    """Build a ChainController around mocks and extend its chain by 5 blocks.

    Uses a transaction executor configured with
    ``batch_execution_result=None``; leaves ``self.init_head`` at the head
    of the generated 5-block chain.
    """
    self.block_tree_manager = BlockTreeManager()
    self.gossip = MockNetwork()
    self.executor = SynchronousExecutor()
    self.txn_executor = MockTransactionExecutor()
    self.block_sender = MockBlockSender()
    self.chain_id_manager = MockChainIdManager()
    self._chain_head_lock = RLock()
    self.state_delta_processor = MockStateDeltaProcessor()

    # No-op callback with the signature ChainController expects.
    def chain_updated(head, committed_batches=None,
                      uncommitted_batches=None):
        pass

    self.chain_ctrl = ChainController(
        block_cache=self.block_tree_manager.block_cache,
        state_view_factory=MockStateViewFactory(
            self.block_tree_manager.state_db),
        block_sender=self.block_sender,
        executor=self.executor,
        transaction_executor=MockTransactionExecutor(
            batch_execution_result=None),
        chain_head_lock=self._chain_head_lock,
        on_chain_updated=chain_updated,
        squash_handler=None,
        chain_id_manager=self.chain_id_manager,
        state_delta_processor=self.state_delta_processor,
        identity_signing_key=self.block_tree_manager.identity_signing_key,
        data_dir=None,
        config_dir=None)

    init_root = self.chain_ctrl.chain_head
    self.assert_is_chain_head(init_root)

    # create a chain of length 5 extending the root
    _, head = self.generate_chain(init_root, 5)
    self.receive_and_process_blocks(head)
    self.assert_is_chain_head(head)

    self.init_head = head
def __init__(self,
             bind_network,
             bind_component,
             bind_consensus,
             endpoint,
             peering,
             seeds_list,
             peer_list,
             data_dir,
             config_dir,
             identity_signer,
             scheduler_type,
             permissions,
             minimum_peer_connectivity,
             maximum_peer_connectivity,
             state_pruning_block_depth,
             network_public_key=None,
             network_private_key=None,
             roles=None):
    """Constructs a validator instance.

    Args:
        bind_network (str): the network endpoint
        bind_component (str): the component endpoint
        endpoint (str): the zmq-style URI of this validator's publically
            reachable endpoint
        peering (str): The type of peering approach. Either 'static' or
            'dynamic'. In 'static' mode, no attempted topology buildout
            occurs -- the validator only attempts to initiate peering
            connections with endpoints specified in the peer_list. In
            'dynamic' mode, the validator will first attempt to initiate
            peering connections with endpoints specified in the peer_list
            and then attempt to do a topology buildout starting with peer
            lists obtained from endpoints in the seeds_list. In either
            mode, the validator will accept incoming peer requests up to
            max_peers.
        seeds_list (list of str): a list of addresses to connect to in
            order to perform the initial topology buildout
        peer_list (list of str): a list of peer addresses
        data_dir (str): path to the data directory
        config_dir (str): path to the config directory
        identity_signer (str): cryptographic signer the validator uses for
            signing
    """
    # -- Setup Global State Database and Factory -- #
    global_state_db_filename = os.path.join(
        data_dir, 'merkle-{}.lmdb'.format(bind_network[-2:]))
    LOGGER.debug(
        'global state database file is %s', global_state_db_filename)
    global_state_db = NativeLmdbDatabase(
        global_state_db_filename,
        indexes=MerkleDatabase.create_index_configuration())
    state_view_factory = StateViewFactory(global_state_db)

    # -- Setup Receipt Store -- #
    receipt_db_filename = os.path.join(
        data_dir, 'txn_receipts-{}.lmdb'.format(bind_network[-2:]))
    LOGGER.debug('txn receipt store file is %s', receipt_db_filename)
    receipt_db = LMDBNoLockDatabase(receipt_db_filename, 'c')
    receipt_store = TransactionReceiptStore(receipt_db)

    # -- Setup Block Store -- #
    block_db_filename = os.path.join(
        data_dir, 'block-{}.lmdb'.format(bind_network[-2:]))
    LOGGER.debug('block store file is %s', block_db_filename)
    block_db = IndexedDatabase(
        block_db_filename,
        BlockStore.serialize_block,
        BlockStore.deserialize_block,
        flag='c',
        indexes=BlockStore.create_index_configuration())
    block_store = BlockStore(block_db)

    # The cache keep time for the journal's block cache must be greater
    # than the cache keep time used by the completer.
    base_keep_time = 1200
    block_cache = BlockCache(
        block_store,
        keep_time=int(base_keep_time * 9 / 8),
        purge_frequency=30)

    # -- Setup Thread Pools -- #
    component_thread_pool = InstrumentedThreadPoolExecutor(
        max_workers=10, name='Component')
    network_thread_pool = InstrumentedThreadPoolExecutor(
        max_workers=10, name='Network')
    client_thread_pool = InstrumentedThreadPoolExecutor(
        max_workers=5, name='Client')
    sig_pool = InstrumentedThreadPoolExecutor(
        max_workers=3, name='Signature')

    # -- Setup Dispatchers -- #
    component_dispatcher = Dispatcher()
    network_dispatcher = Dispatcher()

    # -- Setup Services -- #
    component_service = Interconnect(
        bind_component,
        component_dispatcher,
        secured=False,
        heartbeat=False,
        max_incoming_connections=20,
        monitor=True,
        max_future_callback_workers=10)

    zmq_identity = hashlib.sha512(
        time.time().hex().encode()).hexdigest()[:23]

    # The network is secured only when both key halves are provided.
    secure = False
    if network_public_key is not None and network_private_key is not None:
        secure = True

    network_service = Interconnect(
        bind_network,
        dispatcher=network_dispatcher,
        zmq_identity=zmq_identity,
        secured=secure,
        server_public_key=network_public_key,
        server_private_key=network_private_key,
        heartbeat=True,
        public_endpoint=endpoint,
        connection_timeout=120,
        max_incoming_connections=100,
        max_future_callback_workers=10,
        authorize=True,
        signer=identity_signer,
        roles=roles)

    # -- Setup Transaction Execution Platform -- #
    context_manager = ContextManager(global_state_db)

    batch_tracker = BatchTracker(block_store)

    settings_cache = SettingsCache(
        SettingsViewFactory(state_view_factory),
    )

    transaction_executor = TransactionExecutor(
        service=component_service,
        context_manager=context_manager,
        settings_view_factory=SettingsViewFactory(state_view_factory),
        scheduler_type=scheduler_type,
        invalid_observers=[batch_tracker])

    component_service.set_check_connections(
        transaction_executor.check_connections)

    event_broadcaster = EventBroadcaster(
        component_service, block_store, receipt_store)

    # -- Setup P2P Networking -- #
    gossip = Gossip(
        network_service,
        settings_cache,
        lambda: block_store.chain_head,
        block_store.chain_head_state_root,
        endpoint=endpoint,
        peering_mode=peering,
        initial_seed_endpoints=seeds_list,
        initial_peer_endpoints=peer_list,
        minimum_peer_connectivity=minimum_peer_connectivity,
        maximum_peer_connectivity=maximum_peer_connectivity,
        topology_check_frequency=1)

    completer = Completer(
        block_store,
        gossip,
        cache_keep_time=base_keep_time,
        cache_purge_frequency=30,
        requested_keep_time=300)

    block_sender = BroadcastBlockSender(completer, gossip)
    batch_sender = BroadcastBatchSender(completer, gossip)
    chain_id_manager = ChainIdManager(data_dir)

    identity_view_factory = IdentityViewFactory(
        StateViewFactory(global_state_db))
    id_cache = IdentityCache(identity_view_factory)

    # -- Setup Permissioning -- #
    permission_verifier = PermissionVerifier(
        permissions, block_store.chain_head_state_root, id_cache)

    identity_observer = IdentityObserver(
        to_update=id_cache.invalidate,
        forked=id_cache.forked)

    settings_observer = SettingsObserver(
        to_update=settings_cache.invalidate,
        forked=settings_cache.forked)

    # -- Consensus Engine -- #
    consensus_thread_pool = InstrumentedThreadPoolExecutor(
        max_workers=3, name='Consensus')
    consensus_dispatcher = Dispatcher()
    consensus_service = Interconnect(
        bind_consensus,
        consensus_dispatcher,
        secured=False,
        heartbeat=False,
        max_incoming_connections=20,
        monitor=True,
        max_future_callback_workers=10)
    consensus_notifier = ConsensusNotifier(consensus_service)

    # -- Setup Journal -- #
    batch_injector_factory = DefaultBatchInjectorFactory(
        block_cache=block_cache,
        state_view_factory=state_view_factory,
        signer=identity_signer)

    block_publisher = BlockPublisher(
        transaction_executor=transaction_executor,
        block_cache=block_cache,
        state_view_factory=state_view_factory,
        settings_cache=settings_cache,
        block_sender=block_sender,
        batch_sender=batch_sender,
        chain_head=block_store.chain_head,
        identity_signer=identity_signer,
        data_dir=data_dir,
        config_dir=config_dir,
        permission_verifier=permission_verifier,
        check_publish_block_frequency=0.1,
        batch_observers=[batch_tracker],
        batch_injector_factory=batch_injector_factory)

    block_publisher_batch_sender = block_publisher.batch_sender()

    block_validator = BlockValidator(
        block_cache=block_cache,
        state_view_factory=state_view_factory,
        transaction_executor=transaction_executor,
        identity_signer=identity_signer,
        data_dir=data_dir,
        config_dir=config_dir,
        permission_verifier=permission_verifier)

    chain_controller = ChainController(
        block_store=block_store,
        block_cache=block_cache,
        block_validator=block_validator,
        state_database=global_state_db,
        chain_head_lock=block_publisher.chain_head_lock,
        state_pruning_block_depth=state_pruning_block_depth,
        data_dir=data_dir,
        observers=[
            event_broadcaster,
            receipt_store,
            batch_tracker,
            identity_observer,
            settings_observer
        ])

    genesis_controller = GenesisController(
        context_manager=context_manager,
        transaction_executor=transaction_executor,
        completer=completer,
        block_store=block_store,
        state_view_factory=state_view_factory,
        identity_signer=identity_signer,
        data_dir=data_dir,
        config_dir=config_dir,
        chain_id_manager=chain_id_manager,
        batch_sender=batch_sender)

    responder = Responder(completer)

    completer.set_on_batch_received(block_publisher_batch_sender.send)
    completer.set_on_block_received(chain_controller.queue_block)
    completer.set_chain_has_block(chain_controller.has_block)

    # -- Register Message Handler -- #
    network_handlers.add(
        network_dispatcher, network_service, gossip, completer,
        responder, network_thread_pool, sig_pool,
        chain_controller.has_block, block_publisher.has_batch,
        permission_verifier, block_publisher, consensus_notifier)

    component_handlers.add(
        component_dispatcher, gossip, context_manager,
        transaction_executor, completer, block_store, batch_tracker,
        global_state_db, self.get_chain_head_state_root_hash,
        receipt_store, event_broadcaster, permission_verifier,
        component_thread_pool, client_thread_pool, sig_pool,
        block_publisher)

    # -- Store Object References -- #
    self._component_dispatcher = component_dispatcher
    self._component_service = component_service
    self._component_thread_pool = component_thread_pool

    self._network_dispatcher = network_dispatcher
    self._network_service = network_service
    self._network_thread_pool = network_thread_pool

    consensus_proxy = ConsensusProxy(
        block_cache=block_cache,
        chain_controller=chain_controller,
        block_publisher=block_publisher,
        gossip=gossip,
        identity_signer=identity_signer,
        settings_view_factory=SettingsViewFactory(state_view_factory),
        state_view_factory=state_view_factory)

    consensus_handlers.add(
        consensus_dispatcher, consensus_thread_pool, consensus_proxy)

    self._consensus_dispatcher = consensus_dispatcher
    self._consensus_service = consensus_service
    self._consensus_thread_pool = consensus_thread_pool

    self._client_thread_pool = client_thread_pool
    self._sig_pool = sig_pool

    self._context_manager = context_manager
    self._transaction_executor = transaction_executor
    self._genesis_controller = genesis_controller
    self._gossip = gossip

    self._block_publisher = block_publisher
    self._chain_controller = chain_controller
    self._block_validator = block_validator
class TestChainControllerGenesisPeer(unittest.TestCase):
    """Exercises ChainController for a validator that starts with no chain.

    The controller is built over a BlockTreeManager created without a
    genesis block, so ``chain_head`` is None until a genesis block whose
    signature matches the on-disk block-chain-id is accepted.
    """

    def setUp(self):
        self.block_tree_manager = BlockTreeManager(with_genesis=False)
        self.gossip = MockNetwork()
        self.executor = SynchronousExecutor()
        self.txn_executor = MockTransactionExecutor()
        self.block_sender = MockBlockSender()
        self.chain_id_manager = MockChainIdManager()
        self.state_delta_processor = MockStateDeltaProcessor()
        self.chain_head_lock = RLock()

        # No-op chain-head callback; these tests inspect chain_head directly.
        def chain_updated(head, committed_batches=None,
                          uncommitted_batches=None):
            pass

        self.chain_ctrl = ChainController(
            block_cache=self.block_tree_manager.block_cache,
            state_view_factory=MockStateViewFactory(
                self.block_tree_manager.state_db),
            block_sender=self.block_sender,
            executor=self.executor,
            transaction_executor=MockTransactionExecutor(),
            chain_head_lock=self.chain_head_lock,
            on_chain_updated=chain_updated,
            squash_handler=None,
            chain_id_manager=self.chain_id_manager,
            state_delta_processor=self.state_delta_processor,
            identity_signing_key=self.block_tree_manager.identity_signing_key,
            data_dir=None,
            config_dir=None)

        # Sanity: with no genesis, the controller must start headless.
        self.assertIsNone(self.chain_ctrl.chain_head)

    def test_genesis_block_mismatch(self):
        '''Test mismatch block chain id will drop genesis block.
        Given a ChainController with an empty chain
        mismatches the block-chain-id stored on disk.
        '''
        self.chain_id_manager.save_block_chain_id('my_chain_id')
        some_other_genesis_block = \
            self.block_tree_manager.generate_genesis_block()
        self.chain_ctrl.on_block_received(some_other_genesis_block)

        self.assertIsNone(self.chain_ctrl.chain_head)

    def test_genesis_block_matches_block_chain_id(self):
        '''Test that a validator with no chain will accept a valid genesis
        block that matches the block-chain-id stored on disk.
        '''
        my_genesis_block = self.block_tree_manager.generate_genesis_block()
        chain_id = my_genesis_block.header_signature
        self.chain_id_manager.save_block_chain_id(chain_id)

        # Force validation to succeed so acceptance hinges only on the id.
        with patch.object(BlockValidator,
                          'validate_block',
                          return_value=True):
            self.chain_ctrl.on_block_received(my_genesis_block)

        self.assertIsNotNone(self.chain_ctrl.chain_head)
        chain_head_sig = self.chain_ctrl.chain_head.header_signature

        self.assertEqual(
            chain_head_sig[:8],
            chain_id[:8],
            'Chain id does not match')

        self.assertEqual(chain_id,
                         self.chain_id_manager.get_block_chain_id())

    def test_invalid_genesis_block_matches_block_chain_id(self):
        '''Test that a validator with no chain will drop an invalid genesis
        block that matches the block-chain-id stored on disk.
        '''
        my_genesis_block = self.block_tree_manager.generate_genesis_block()
        chain_id = my_genesis_block.header_signature
        self.chain_id_manager.save_block_chain_id(chain_id)

        # Force validation to fail: even a matching id must be rejected.
        with patch.object(BlockValidator,
                          'validate_block',
                          return_value=False):
            self.chain_ctrl.on_block_received(my_genesis_block)

        self.assertIsNone(self.chain_ctrl.chain_head)
class TestChainController(unittest.TestCase):
    """Fork-resolution tests for ChainController over a synchronous executor.

    setUp builds a controller whose chain head is the tip of a 5-block
    chain extending the genesis root; each test then feeds competing
    blocks/forks and asserts which block ends up as chain head.
    """

    def setUp(self):
        self.block_tree_manager = BlockTreeManager()
        self.gossip = MockNetwork()
        self.executor = SynchronousExecutor()
        self.txn_executor = MockTransactionExecutor()
        self.block_sender = MockBlockSender()
        self.chain_id_manager = MockChainIdManager()
        self._chain_head_lock = RLock()
        self.state_delta_processor = MockStateDeltaProcessor()

        # No-op chain-head callback; tests assert via assert_is_chain_head.
        def chain_updated(head, committed_batches=None,
                          uncommitted_batches=None):
            pass

        self.chain_ctrl = ChainController(
            block_cache=self.block_tree_manager.block_cache,
            state_view_factory=MockStateViewFactory(
                self.block_tree_manager.state_db),
            block_sender=self.block_sender,
            executor=self.executor,
            transaction_executor=MockTransactionExecutor(),
            chain_head_lock=self._chain_head_lock,
            on_chain_updated=chain_updated,
            squash_handler=None,
            chain_id_manager=self.chain_id_manager,
            state_delta_processor=self.state_delta_processor,
            identity_signing_key=self.block_tree_manager.identity_signing_key,
            data_dir=None,
            config_dir=None)

        init_root = self.chain_ctrl.chain_head
        self.assert_is_chain_head(init_root)

        # create a chain of length 5 extending the root
        _, head = self.generate_chain(init_root, 5)
        self.receive_and_process_blocks(head)
        self.assert_is_chain_head(head)

        self.init_head = head

    def test_simple_case(self):
        new_block = self.generate_block(self.init_head)
        self.receive_and_process_blocks(new_block)
        self.assert_is_chain_head(new_block)
        # validate that the deltas for the new block are published
        self.assertEqual(new_block, self.state_delta_processor.block)

    def test_alternate_genesis(self):
        '''Tests a fork extending an alternate genesis block
        '''
        chain, head = self.generate_chain(None, 5)

        for block in chain:
            self.receive_and_process_blocks(block)

        # make sure initial head is still chain head
        self.assert_is_chain_head(self.init_head)

    def test_bad_blocks(self):
        '''Tests bad blocks extending current chain
        '''
        # Bad due to consensus
        bad_consen = self.generate_block(
            previous_block=self.init_head,
            invalid_consensus=True)

        # chain head should be the same
        self.receive_and_process_blocks(bad_consen)
        self.assert_is_chain_head(self.init_head)

        # Bad due to transaction
        bad_batch = self.generate_block(
            previous_block=self.init_head,
            invalid_batch=True)

        # chain head should be the same
        self.receive_and_process_blocks(bad_batch)
        self.assert_is_chain_head(self.init_head)

        # # Ensure good block works
        good_block = self.generate_block(
            previous_block=self.init_head)

        # chain head should be good_block
        self.receive_and_process_blocks(good_block)
        self.assert_is_chain_head(good_block)

    def test_fork_weights(self):
        '''Tests extending blocks of different weights
        '''
        weight_4 = self.generate_block(
            previous_block=self.init_head,
            weight=4)

        weight_7 = self.generate_block(
            previous_block=self.init_head,
            weight=7)

        weight_8 = self.generate_block(
            previous_block=self.init_head,
            weight=8)

        self.receive_and_process_blocks(
            weight_7,
            weight_4,
            weight_8)

        self.assert_is_chain_head(weight_8)

    def test_fork_lengths(self):
        '''Tests competing forks of different lengths
        '''
        _, head_2 = self.generate_chain(self.init_head, 2)
        _, head_7 = self.generate_chain(self.init_head, 7)
        _, head_5 = self.generate_chain(self.init_head, 5)

        self.receive_and_process_blocks(
            head_2,
            head_7,
            head_5)

        self.assert_is_chain_head(head_7)

    def test_advancing_chain(self):
        '''Tests the chain being advanced between a fork's
        creation and validation
        '''
        _, fork_5 = self.generate_chain(self.init_head, 5)
        _, fork_3 = self.generate_chain(self.init_head, 3)

        self.receive_and_process_blocks(fork_3)
        self.assert_is_chain_head(fork_3)

        # fork_5 is longer than fork_3, so it should be accepted
        self.receive_and_process_blocks(fork_5)
        self.assert_is_chain_head(fork_5)

    def test_fork_missing_block(self):
        '''Tests a fork with a missing block
        '''
        # make new chain
        new_chain, new_head = self.generate_chain(self.init_head, 5)
        self.chain_ctrl.on_block_received(new_head)

        # delete a block from the new chain
        del self.chain_ctrl._block_cache[new_chain[3].identifier]

        self.executor.process_all()

        # chain shouldn't advance
        self.assert_is_chain_head(self.init_head)

        # try again, chain still shouldn't advance
        self.receive_and_process_blocks(new_head)
        self.assert_is_chain_head(self.init_head)

    def test_fork_bad_block(self):
        '''Tests a fork with a bad block in the middle
        '''
        # make two chains extending chain
        good_chain, good_head = self.generate_chain(self.init_head, 5)
        bad_chain, bad_head = self.generate_chain(self.init_head, 5)

        self.chain_ctrl.on_block_received(bad_head)
        self.chain_ctrl.on_block_received(good_head)

        # invalidate block in the middle of bad_chain
        bad_chain[3].status = BlockStatus.Invalid

        self.executor.process_all()

        # good_chain should be accepted
        self.assert_is_chain_head(good_head)

    def test_advancing_fork(self):
        '''Tests a fork advancing before getting validated
        '''
        _, fork_head = self.generate_chain(self.init_head, 5)

        self.chain_ctrl.on_block_received(fork_head)

        # advance fork before it gets accepted
        _, ext_head = self.generate_chain(fork_head, 3)

        self.executor.process_all()

        self.assert_is_chain_head(fork_head)

        self.receive_and_process_blocks(ext_head)

        self.assert_is_chain_head(ext_head)

    def test_block_extends_in_validation(self):
        '''Tests a block getting extended while being validated
        '''
        # create candidate block
        candidate = self.block_tree_manager.generate_block(
            previous_block=self.init_head)

        self.assert_is_chain_head(self.init_head)

        # queue up the candidate block, but don't process
        self.chain_ctrl.on_block_received(candidate)

        # create a new block extending the candidate block
        extending_block = self.block_tree_manager.generate_block(
            previous_block=candidate)

        self.assert_is_chain_head(self.init_head)

        # queue and process the extending block,
        # which should be the new head
        self.receive_and_process_blocks(extending_block)
        self.assert_is_chain_head(extending_block)

    def test_multiple_extended_forks(self):
        '''A more involved example of competing forks

        Three forks of varying lengths a_0, b_0, and c_0 are created
        extending the existing chain, with c_0 being the longest initially.
        The chains are extended in the following sequence:

        1. Extend all forks by 2. The c fork should remain the head.
        2. Extend forks by lengths such that the b fork is the longest.
           It should be the new head.
        3. Extend all forks by 8. The b fork should remain the head.
        4. Create a new fork of the initial chain longer than any of the
           other forks. It should be the new head.
        '''
        # create forks of various lengths
        _, a_0 = self.generate_chain(self.init_head, 3)
        _, b_0 = self.generate_chain(self.init_head, 5)
        _, c_0 = self.generate_chain(self.init_head, 7)

        self.receive_and_process_blocks(a_0, b_0, c_0)
        self.assert_is_chain_head(c_0)

        # extend every fork by 2
        _, a_1 = self.generate_chain(a_0, 2)
        _, b_1 = self.generate_chain(b_0, 2)
        _, c_1 = self.generate_chain(c_0, 2)

        self.receive_and_process_blocks(a_1, b_1, c_1)
        self.assert_is_chain_head(c_1)

        # extend the forks by different lengths
        _, a_2 = self.generate_chain(a_1, 1)
        _, b_2 = self.generate_chain(b_1, 6)
        _, c_2 = self.generate_chain(c_1, 3)

        self.receive_and_process_blocks(a_2, b_2, c_2)
        self.assert_is_chain_head(b_2)

        # extend every fork by 8 (fixed comment: was wrongly "by 2")
        _, a_3 = self.generate_chain(a_2, 8)
        _, b_3 = self.generate_chain(b_2, 8)
        _, c_3 = self.generate_chain(c_2, 8)

        self.receive_and_process_blocks(a_3, b_3, c_3)
        self.assert_is_chain_head(b_3)

        # create a new longest chain
        _, wow = self.generate_chain(self.init_head, 30)
        self.receive_and_process_blocks(wow)
        self.assert_is_chain_head(wow)

    # next multi threaded
    # next add block publisher
    # next batch lists
    # integrate with LMDB
    # early vs late binding ( class member of consensus BlockPublisher)

    # helpers

    def assert_is_chain_head(self, block):
        """Assert that `block` is the controller's current chain head.

        Compares only the first 8 signature characters, matching the
        abbreviated ids used elsewhere in these tests.
        """
        chain_head_sig = self.chain_ctrl.chain_head.header_signature
        block_sig = block.header_signature

        self.assertEqual(
            chain_head_sig[:8],
            block_sig[:8],
            'Not chain head')

    def generate_chain(self, root_block, num_blocks, params=None):
        '''Returns (chain, chain_head).
        Usually only the head is needed,
        but occasionally the chain itself is used.

        `params` defaults to {'add_to_cache': True}; the mutable-default
        argument from the original was replaced with a None sentinel so a
        caller-mutated dict cannot leak across calls.
        '''
        if params is None:
            params = {'add_to_cache': True}

        chain = self.block_tree_manager.generate_chain(
            root_block, num_blocks, params)

        head = chain[-1]

        return chain, head

    def generate_block(self, *args, **kwargs):
        """Thin pass-through to BlockTreeManager.generate_block."""
        return self.block_tree_manager.generate_block(
            *args, **kwargs)

    def receive_and_process_blocks(self, *blocks):
        """Queue each block on the controller, then drain the executor."""
        for block in blocks:
            self.chain_ctrl.on_block_received(block)
        self.executor.process_all()
class TestChainController(unittest.TestCase):
    """Fork-resolution tests for ChainController (variant: the controller's
    transaction executor is constructed with ``batch_execution_result=None``).

    setUp builds a controller whose chain head is the tip of a 5-block
    chain extending the genesis root; each test then feeds competing
    blocks/forks and asserts which block ends up as chain head.
    """

    def setUp(self):
        self.block_tree_manager = BlockTreeManager()
        self.gossip = MockNetwork()
        self.executor = SynchronousExecutor()
        self.txn_executor = MockTransactionExecutor()
        self.block_sender = MockBlockSender()
        self.chain_id_manager = MockChainIdManager()
        self._chain_head_lock = RLock()
        self.state_delta_processor = MockStateDeltaProcessor()

        # No-op chain-head callback; tests assert via assert_is_chain_head.
        def chain_updated(head, committed_batches=None,
                          uncommitted_batches=None):
            pass

        self.chain_ctrl = ChainController(
            block_cache=self.block_tree_manager.block_cache,
            state_view_factory=MockStateViewFactory(
                self.block_tree_manager.state_db),
            block_sender=self.block_sender,
            executor=self.executor,
            transaction_executor=MockTransactionExecutor(
                batch_execution_result=None),
            chain_head_lock=self._chain_head_lock,
            on_chain_updated=chain_updated,
            squash_handler=None,
            chain_id_manager=self.chain_id_manager,
            state_delta_processor=self.state_delta_processor,
            identity_signing_key=self.block_tree_manager.identity_signing_key,
            data_dir=None,
            config_dir=None)

        init_root = self.chain_ctrl.chain_head
        self.assert_is_chain_head(init_root)

        # create a chain of length 5 extending the root
        _, head = self.generate_chain(init_root, 5)
        self.receive_and_process_blocks(head)
        self.assert_is_chain_head(head)

        self.init_head = head

    def test_simple_case(self):
        new_block = self.generate_block(self.init_head)
        self.receive_and_process_blocks(new_block)
        self.assert_is_chain_head(new_block)
        # validate that the deltas for the new block are published
        self.assertEqual(new_block, self.state_delta_processor.block)

    def test_alternate_genesis(self):
        '''Tests a fork extending an alternate genesis block
        '''
        chain, head = self.generate_chain(None, 5)

        for block in chain:
            self.receive_and_process_blocks(block)

        # make sure initial head is still chain head
        self.assert_is_chain_head(self.init_head)

    def test_bad_blocks(self):
        '''Tests bad blocks extending current chain
        '''
        # Bad due to consensus
        bad_consen = self.generate_block(
            previous_block=self.init_head,
            invalid_consensus=True)

        # chain head should be the same
        self.receive_and_process_blocks(bad_consen)
        self.assert_is_chain_head(self.init_head)

        # Bad due to transaction
        bad_batch = self.generate_block(
            previous_block=self.init_head,
            invalid_batch=True)

        # chain head should be the same
        self.receive_and_process_blocks(bad_batch)
        self.assert_is_chain_head(self.init_head)

        # # Ensure good block works
        good_block = self.generate_block(
            previous_block=self.init_head)

        # chain head should be good_block
        self.receive_and_process_blocks(good_block)
        self.assert_is_chain_head(good_block)

    def test_fork_weights(self):
        '''Tests extending blocks of different weights
        '''
        weight_4 = self.generate_block(
            previous_block=self.init_head,
            weight=4)

        weight_7 = self.generate_block(
            previous_block=self.init_head,
            weight=7)

        weight_8 = self.generate_block(
            previous_block=self.init_head,
            weight=8)

        self.receive_and_process_blocks(
            weight_7,
            weight_4,
            weight_8)

        self.assert_is_chain_head(weight_8)

    def test_fork_lengths(self):
        '''Tests competing forks of different lengths
        '''
        _, head_2 = self.generate_chain(self.init_head, 2)
        _, head_7 = self.generate_chain(self.init_head, 7)
        _, head_5 = self.generate_chain(self.init_head, 5)

        self.receive_and_process_blocks(
            head_2,
            head_7,
            head_5)

        self.assert_is_chain_head(head_7)

    def test_advancing_chain(self):
        '''Tests the chain being advanced between a fork's
        creation and validation
        '''
        _, fork_5 = self.generate_chain(self.init_head, 5)
        _, fork_3 = self.generate_chain(self.init_head, 3)

        self.receive_and_process_blocks(fork_3)
        self.assert_is_chain_head(fork_3)

        # fork_5 is longer than fork_3, so it should be accepted
        self.receive_and_process_blocks(fork_5)
        self.assert_is_chain_head(fork_5)

    def test_fork_missing_block(self):
        '''Tests a fork with a missing block
        '''
        # make new chain
        new_chain, new_head = self.generate_chain(self.init_head, 5)
        self.chain_ctrl.on_block_received(new_head)

        # delete a block from the new chain
        del self.chain_ctrl._block_cache[new_chain[3].identifier]

        self.executor.process_all()

        # chain shouldn't advance
        self.assert_is_chain_head(self.init_head)

        # try again, chain still shouldn't advance
        self.receive_and_process_blocks(new_head)
        self.assert_is_chain_head(self.init_head)

    def test_fork_bad_block(self):
        '''Tests a fork with a bad block in the middle
        '''
        # make two chains extending chain
        good_chain, good_head = self.generate_chain(self.init_head, 5)
        bad_chain, bad_head = self.generate_chain(self.init_head, 5)

        self.chain_ctrl.on_block_received(bad_head)
        self.chain_ctrl.on_block_received(good_head)

        # invalidate block in the middle of bad_chain
        bad_chain[3].status = BlockStatus.Invalid

        self.executor.process_all()

        # good_chain should be accepted
        self.assert_is_chain_head(good_head)

    def test_advancing_fork(self):
        '''Tests a fork advancing before getting validated
        '''
        _, fork_head = self.generate_chain(self.init_head, 5)

        self.chain_ctrl.on_block_received(fork_head)

        # advance fork before it gets accepted
        _, ext_head = self.generate_chain(fork_head, 3)

        self.executor.process_all()

        self.assert_is_chain_head(fork_head)

        self.receive_and_process_blocks(ext_head)

        self.assert_is_chain_head(ext_head)

    def test_block_extends_in_validation(self):
        '''Tests a block getting extended while being validated
        '''
        # create candidate block
        candidate = self.block_tree_manager.generate_block(
            previous_block=self.init_head)

        self.assert_is_chain_head(self.init_head)

        # queue up the candidate block, but don't process
        self.chain_ctrl.on_block_received(candidate)

        # create a new block extending the candidate block
        extending_block = self.block_tree_manager.generate_block(
            previous_block=candidate)

        self.assert_is_chain_head(self.init_head)

        # queue and process the extending block,
        # which should be the new head
        self.receive_and_process_blocks(extending_block)
        self.assert_is_chain_head(extending_block)

    def test_multiple_extended_forks(self):
        '''A more involved example of competing forks

        Three forks of varying lengths a_0, b_0, and c_0 are created
        extending the existing chain, with c_0 being the longest initially.
        The chains are extended in the following sequence:

        1. Extend all forks by 2. The c fork should remain the head.
        2. Extend forks by lengths such that the b fork is the longest.
           It should be the new head.
        3. Extend all forks by 8. The b fork should remain the head.
        4. Create a new fork of the initial chain longer than any of the
           other forks. It should be the new head.
        '''
        # create forks of various lengths
        _, a_0 = self.generate_chain(self.init_head, 3)
        _, b_0 = self.generate_chain(self.init_head, 5)
        _, c_0 = self.generate_chain(self.init_head, 7)

        self.receive_and_process_blocks(a_0, b_0, c_0)
        self.assert_is_chain_head(c_0)

        # extend every fork by 2
        _, a_1 = self.generate_chain(a_0, 2)
        _, b_1 = self.generate_chain(b_0, 2)
        _, c_1 = self.generate_chain(c_0, 2)

        self.receive_and_process_blocks(a_1, b_1, c_1)
        self.assert_is_chain_head(c_1)

        # extend the forks by different lengths
        _, a_2 = self.generate_chain(a_1, 1)
        _, b_2 = self.generate_chain(b_1, 6)
        _, c_2 = self.generate_chain(c_1, 3)

        self.receive_and_process_blocks(a_2, b_2, c_2)
        self.assert_is_chain_head(b_2)

        # extend every fork by 8 (fixed comment: was wrongly "by 2")
        _, a_3 = self.generate_chain(a_2, 8)
        _, b_3 = self.generate_chain(b_2, 8)
        _, c_3 = self.generate_chain(c_2, 8)

        self.receive_and_process_blocks(a_3, b_3, c_3)
        self.assert_is_chain_head(b_3)

        # create a new longest chain
        _, wow = self.generate_chain(self.init_head, 30)
        self.receive_and_process_blocks(wow)
        self.assert_is_chain_head(wow)

    # next multi threaded
    # next add block publisher
    # next batch lists
    # integrate with LMDB
    # early vs late binding ( class member of consensus BlockPublisher)

    # helpers

    def assert_is_chain_head(self, block):
        """Assert that `block` is the controller's current chain head.

        Compares only the first 8 signature characters, matching the
        abbreviated ids used elsewhere in these tests.
        """
        chain_head_sig = self.chain_ctrl.chain_head.header_signature
        block_sig = block.header_signature

        self.assertEqual(
            chain_head_sig[:8],
            block_sig[:8],
            'Not chain head')

    def generate_chain(self, root_block, num_blocks, params=None):
        '''Returns (chain, chain_head).
        Usually only the head is needed,
        but occasionally the chain itself is used.

        `params` defaults to {'add_to_cache': True}; the mutable-default
        argument from the original was replaced with a None sentinel so a
        caller-mutated dict cannot leak across calls.
        '''
        if params is None:
            params = {'add_to_cache': True}

        chain = self.block_tree_manager.generate_chain(
            root_block, num_blocks, params)

        head = chain[-1]

        return chain, head

    def generate_block(self, *args, **kwargs):
        """Thin pass-through to BlockTreeManager.generate_block."""
        return self.block_tree_manager.generate_block(
            *args, **kwargs)

    def receive_and_process_blocks(self, *blocks):
        """Queue each block on the controller, then drain the executor."""
        for block in blocks:
            self.chain_ctrl.on_block_received(block)
        self.executor.process_all()
def do_publish_block():
    """Publish one batch through a real BlockPublisher/ChainController pair
    and wait for the resulting block to become the chain head.

    Uses the module-level ``testJournal`` fixture for executors, senders,
    gossip, and the permission verifier. Components are always stopped in
    the ``finally`` clause, even if construction or publishing fails.
    """
    btm = BlockTreeManager()
    block_publisher = None
    chain_controller = None
    # FIX: block_validator must be pre-bound; previously it was first
    # assigned inside the try, so a failure while constructing
    # BlockPublisher raised NameError in the finally clause and masked
    # the original exception.
    block_validator = None
    try:
        block_publisher = BlockPublisher(
            transaction_executor=testJournal.txn_executor,
            block_cache=btm.block_cache,
            state_view_factory=MockStateViewFactory(btm.state_db),
            settings_cache=SettingsCache(
                SettingsViewFactory(btm.state_view_factory),
            ),
            block_sender=testJournal.block_sender,
            batch_sender=testJournal.batch_sender,
            squash_handler=None,
            chain_head=btm.block_store.chain_head,
            identity_signer=btm.identity_signer,
            data_dir=None,
            config_dir=None,
            permission_verifier=testJournal.permission_verifier,
            check_publish_block_frequency=0.1,
            batch_observers=[],
            batch_injector_factory=DefaultBatchInjectorFactory(
                block_store=btm.block_store,
                state_view_factory=MockStateViewFactory(btm.state_db),
                signer=btm.identity_signer))

        block_validator = BlockValidator(
            state_view_factory=MockStateViewFactory(btm.state_db),
            block_cache=btm.block_cache,
            transaction_executor=testJournal.txn_executor,
            squash_handler=None,
            identity_signer=btm.identity_signer,
            data_dir=None,
            config_dir=None,
            permission_verifier=testJournal.permission_verifier)

        chain_controller = ChainController(
            block_cache=btm.block_cache,
            block_validator=block_validator,
            state_view_factory=MockStateViewFactory(btm.state_db),
            chain_head_lock=block_publisher.chain_head_lock,
            on_chain_updated=block_publisher.on_chain_updated,
            chain_id_manager=None,
            data_dir=None,
            config_dir=None,
            chain_observers=[])

        # Wire gossip callbacks so received batches/blocks flow into the
        # publisher and controller respectively.
        testJournal.gossip.on_batch_received = block_publisher.queue_batch
        testJournal.gossip.on_block_received = chain_controller.queue_block

        block_publisher.start()
        chain_controller.start()

        # feed it a batch
        batch = Batch()
        block_publisher.queue_batch(batch)

        wait_until(lambda: testJournal.block_sender.new_block is not None, 2)
        # FIX: identity comparison with None (was `!= None`).
        assert testJournal.block_sender.new_block is not None

        block = BlockWrapper(testJournal.block_sender.new_block)
        chain_controller.queue_block(block)

        # wait for the chain_head to be updated.
        wait_until(lambda: btm.chain_head.identifier == block.identifier, 2)
        # NOTE(review): the original left this final assertion disabled;
        # kept as-is to preserve behavior — confirm before enabling.
        # assert btm.chain_head.identifier == block.identifier
    finally:
        if block_publisher is not None:
            block_publisher.stop()
        if chain_controller is not None:
            chain_controller.stop()
        if block_validator is not None:
            block_validator.stop()
class TestChainController():
    """Driver-style (non-unittest) ChainController harness.

    Builds a controller backed by an explicit BlockValidator whose thread
    pool is the same SynchronousExecutor the tests drain, then extends
    the root by a 5-block chain and records its tip as ``init_head``.
    """

    def __init__(self):
        self.block_tree_manager = BlockTreeManager()
        self.gossip = MockNetwork()
        self.txn_executor = MockTransactionExecutor()
        self.block_sender = MockBlockSender()
        self.chain_id_manager = MockChainIdManager()
        self._chain_head_lock = RLock()
        self.permission_verifier = MockPermissionVerifier()
        self.state_view_factory = MockStateViewFactory(
            self.block_tree_manager.state_db)
        self.transaction_executor = MockTransactionExecutor(
            batch_execution_result=None)
        self.executor = SynchronousExecutor()

        self.block_validator = BlockValidator(
            state_view_factory=self.state_view_factory,
            block_cache=self.block_tree_manager.block_cache,
            transaction_executor=self.transaction_executor,
            squash_handler=None,
            identity_signer=self.block_tree_manager.identity_signer,
            data_dir=None,
            config_dir=None,
            permission_verifier=self.permission_verifier,
            thread_pool=self.executor)

        # No-op chain-head callback; this harness inspects state directly.
        def chain_updated(head, committed_batches=None,
                          uncommitted_batches=None):
            pass

        self.chain_ctrl = ChainController(
            block_cache=self.block_tree_manager.block_cache,
            block_validator=self.block_validator,
            state_view_factory=self.state_view_factory,
            chain_head_lock=self._chain_head_lock,
            on_chain_updated=chain_updated,
            chain_id_manager=self.chain_id_manager,
            data_dir=None,
            config_dir=None,
            chain_observers=[])

        init_root = self.chain_ctrl.chain_head
        self.assert_is_chain_head(init_root)

        # create a chain of length 5 extending the root
        _, head = self.generate_chain(init_root, 5)
        self.receive_and_process_blocks(head)
        self.assert_is_chain_head(head)

        self.init_head = head

    # next multi threaded
    # next add block publisher
    # next batch lists
    # integrate with LMDB
    # early vs late binding ( class member of consensus BlockPublisher)

    # helpers

    def assert_is_chain_head(self, block):
        """Compute the abbreviated chain-head/block signatures.

        NOTE(review): the actual assertion was left commented out in the
        original, making this a no-op; kept disabled to preserve behavior
        — confirm whether it should be re-enabled.
        """
        chain_head_sig = self.chain_ctrl.chain_head.header_signature
        block_sig = block.header_signature
        # assert chain_head_sig[:8] == block_sig[:8]

    def generate_chain(self, root_block, num_blocks, params=None):
        '''Returns (chain, chain_head).
        Usually only the head is needed,
        but occasionally the chain itself is used.
        '''
        if params is None:
            params = {'add_to_cache': True}

        chain = self.block_tree_manager.generate_chain(
            root_block, num_blocks, params)

        head = chain[-1]

        return chain, head

    def generate_block(self, *args, **kwargs):
        """Thin pass-through to BlockTreeManager.generate_block."""
        return self.block_tree_manager.generate_block(*args, **kwargs)

    def receive_and_process_blocks(self, *blocks):
        """Queue each block on the controller, then drain the executor."""
        for block in blocks:
            self.chain_ctrl.on_block_received(block)
        self.executor.process_all()
class TestChainControllerGenesisPeer(unittest.TestCase):
    """Genesis-join behavior of ChainController (no pre-existing chain).

    BlockTreeManager is created with ``with_genesis=False`` so the
    controller starts with ``chain_head is None``; the tests check how a
    received genesis block interacts with the saved block-chain-id.
    """

    def setUp(self):
        self.block_tree_manager = BlockTreeManager(with_genesis=False)
        self.gossip = MockNetwork()
        self.executor = SynchronousExecutor()
        self.txn_executor = MockTransactionExecutor()
        self.block_sender = MockBlockSender()
        self.chain_id_manager = MockChainIdManager()
        self.state_delta_processor = MockStateDeltaProcessor()
        self.chain_head_lock = RLock()

        # Chain-head callback is a no-op; assertions read chain_head.
        def chain_updated(head, committed_batches=None,
                          uncommitted_batches=None):
            pass

        self.chain_ctrl = ChainController(
            block_cache=self.block_tree_manager.block_cache,
            state_view_factory=MockStateViewFactory(
                self.block_tree_manager.state_db),
            block_sender=self.block_sender,
            executor=self.executor,
            transaction_executor=MockTransactionExecutor(),
            chain_head_lock=self.chain_head_lock,
            on_chain_updated=chain_updated,
            squash_handler=None,
            chain_id_manager=self.chain_id_manager,
            state_delta_processor=self.state_delta_processor,
            identity_signing_key=self.block_tree_manager.identity_signing_key,
            data_dir=None,
            config_dir=None)

        self.assertIsNone(self.chain_ctrl.chain_head)

    def test_genesis_block_mismatch(self):
        '''Test mismatch block chain id will drop genesis block.
        Given a ChainController with an empty chain
        mismatches the block-chain-id stored on disk.
        '''
        self.chain_id_manager.save_block_chain_id('my_chain_id')
        some_other_genesis_block = \
            self.block_tree_manager.generate_genesis_block()
        self.chain_ctrl.on_block_received(some_other_genesis_block)

        self.assertIsNone(self.chain_ctrl.chain_head)

    def test_genesis_block_matches_block_chain_id(self):
        '''Test that a validator with no chain will accept a valid genesis
        block that matches the block-chain-id stored on disk.
        '''
        my_genesis_block = self.block_tree_manager.generate_genesis_block()
        chain_id = my_genesis_block.header_signature
        self.chain_id_manager.save_block_chain_id(chain_id)

        # Validation is stubbed to succeed; only the id match is under test.
        with patch.object(BlockValidator,
                          'validate_block',
                          return_value=True):
            self.chain_ctrl.on_block_received(my_genesis_block)

        self.assertIsNotNone(self.chain_ctrl.chain_head)
        chain_head_sig = self.chain_ctrl.chain_head.header_signature

        self.assertEqual(
            chain_head_sig[:8],
            chain_id[:8],
            'Chain id does not match')

        self.assertEqual(chain_id,
                         self.chain_id_manager.get_block_chain_id())

    def test_invalid_genesis_block_matches_block_chain_id(self):
        '''Test that a validator with no chain will drop an invalid genesis
        block that matches the block-chain-id stored on disk.
        '''
        my_genesis_block = self.block_tree_manager.generate_genesis_block()
        chain_id = my_genesis_block.header_signature
        self.chain_id_manager.save_block_chain_id(chain_id)

        # Validation is stubbed to fail; the block must be dropped.
        with patch.object(BlockValidator,
                          'validate_block',
                          return_value=False):
            self.chain_ctrl.on_block_received(my_genesis_block)

        self.assertIsNone(self.chain_ctrl.chain_head)
class TestChainController(unittest.TestCase):
    """Early-generation ChainController tests (TestModeVerifier consensus,
    gossip-driven block requests)."""

    def setUp(self):
        self.blocks = BlockTreeManager()
        self.gossip = MockNetwork()
        self.executor = SynchronousExecutor()
        self.txn_executor = MockTransactionExecutor()

        # No-op chain-head callback for this controller generation.
        def chain_updated(head):
            pass

        self.chain_ctrl = ChainController(
            consensus=TestModeVerifier(),
            block_store=self.blocks.block_store,
            send_message=self.gossip.send_message,
            executor=self.executor,
            transaction_executor=MockTransactionExecutor(),
            on_chain_updated=chain_updated,
            squash_handler=None)

    def test_simple_case(self):
        # TEST Run the simple case
        block_1 = self.blocks.generate_block(self.blocks.chain_head)
        self.chain_ctrl.on_block_received(block_1.get_block())
        self.executor.process_all()
        assert (self.chain_ctrl.chain_head.block.header_signature ==
                block_1.header_signature)

    def test_alternate_genesis(self):
        # TEST Run generate and alternate genesis block
        head = self.chain_ctrl.chain_head

        other_genesis = self.blocks.generate_block(add_to_store=True)
        for b in self.blocks.generate_chain(other_genesis, 5):
            self.chain_ctrl.on_block_received(b.get_block())
        self.executor.process_all()

        # The head must be unchanged by the alternate-genesis fork.
        assert (self.chain_ctrl.chain_head.block.header_signature ==
                head.block.header_signature)

    def test_bad_block_signature(self):
        # TEST Bad block extending current chain
        # Bad due to signature
        head = self.blocks.chain_head
        block_bad = self.blocks.generate_block(self.blocks.chain_head.block,
                                               invalid_signature=True)
        self.chain_ctrl.on_block_received(block_bad.get_block())
        assert (self.chain_ctrl.chain_head.block.header_signature ==
                head.block.header_signature)

    def test_bad_block_consensus(self):
        # Bad due to consensus
        pass

    def test_bad_block_transaction(self):
        # Bad due to transaction
        pass

    def test_missing_block(self):
        # TEST Missing block G->missing->B
        head = self.blocks.chain_head
        new_blocks = self.blocks.generate_chain(head, 2)

        # Deliver only the second block; the controller should request
        # its missing predecessor over gossip.
        self.chain_ctrl.on_block_received(new_blocks[1].get_block())
        self.executor.process_all()
        assert (len(self.gossip.messages) == 1)
        block_id = self.gossip.messages[0]
        assert (block_id == new_blocks[0].header_signature)
        self.gossip.clear()

        # Supplying the missing block should let the chain advance.
        self.chain_ctrl.on_block_received(new_blocks[0].get_block())
        self.executor.process_all()
        assert (self.chain_ctrl.chain_head.block.header_signature ==
                new_blocks[1].header_signature)

    def test_missing_block_invalid_head(self):
        # TEST Missing block G->missing->B
        # B is invalid but Missing is valid
        head = self.blocks.chain_head
        new_blocks_def = self.blocks.generate_chain_definition(2)
        new_blocks_def[1]["invalid_signature"] = True
        new_blocks = self.blocks.generate_chain(head, new_blocks_def)

        self.chain_ctrl.on_block_received(new_blocks[1].get_block())
        self.executor.process_all()
        assert (len(self.gossip.messages) == 1)
        block_id = self.gossip.messages[0]
        assert (block_id == new_blocks[0].header_signature)
        self.gossip.clear()

        self.chain_ctrl.on_block_received(new_blocks[0].get_block())
        self.executor.process_all()
        # NOTE(review): this test ends with debug prints and no final
        # assertion in the original; preserved as-is — confirm whether a
        # head check was intended here.
        pp.pprint(new_blocks)
        pp.pprint(self.blocks.block_store)