class TestBlockPublisher(unittest.TestCase):
    '''
    The block publisher has three main functions, and in these
    tests those functions are given the following wrappers for
    convenience:
        * on_batch_received -> receive_batches
        * on_chain_updated -> update_chain_head
        * on_check_publish_block -> publish_block

    After publishing a block, publish_block sends its block to the mock
    block sender, and that block is named result_block. This block is
    what is checked by the test assertions.

    The basic pattern for the publisher tests (with variations) is:
        0) make a list of batches (usually in setUp);
        1) receive the batches;
        2) publish a block;
        3) verify the block (checking that it contains the correct
           batches, or checking that it doesn't exist, or whatever).

    The publisher chain head might be updated several times in a test.
    '''

    def setUp(self):
        self.block_tree_manager = BlockTreeManager()
        self.block_sender = MockBlockSender()
        self.batch_sender = MockBatchSender()
        self.state_view_factory = MockStateViewFactory({})

        self.publisher = BlockPublisher(
            transaction_executor=MockTransactionExecutor(),
            block_cache=self.block_tree_manager.block_cache,
            state_view_factory=self.state_view_factory,
            block_sender=self.block_sender,
            batch_sender=self.batch_sender,
            squash_handler=None,
            chain_head=self.block_tree_manager.chain_head,
            identity_signing_key=self.block_tree_manager.identity_signing_key,
            data_dir=None,
            config_dir=None)

        self.init_chain_head = self.block_tree_manager.chain_head

        self.result_block = None

        # A list of batches is created at the beginning of each test.
        # The test assertions and the publisher function wrappers
        # take these batches as a default argument.
        self.batch_count = 8
        self.batches = self.make_batches()

    def test_publish(self):
        '''
        Publish a block with several batches
        '''
        self.receive_batches()

        self.publish_block()

        self.verify_block()

    def test_reject_duplicate_batches_from_receive(self):
        '''
        Test that duplicate batches from on_batch_received are rejected
        '''
        for _ in range(5):
            self.receive_batches()

        self.publish_block()

        self.verify_block()

    def test_reject_duplicate_batches_from_store(self):
        '''
        Test that duplicate batches from the block store are rejected
        '''
        self.update_chain_head(None)

        self.update_chain_head(
            head=self.init_chain_head,
            uncommitted=self.batches)

        self.receive_batches()

        self.publish_block()

        self.verify_block()

    def test_no_chain_head(self):
        '''
        Test that nothing gets published with a null chain head,
        then test that publishing resumes after updating
        '''
        self.update_chain_head(None)

        self.receive_batches()

        # try to publish block (failing)
        self.publish_block()

        self.assert_no_block_published()

        # reset chain head several times,
        # making sure batches remain queued
        for _ in range(3):
            self.update_chain_head(None)
            self.update_chain_head(self.init_chain_head)

        # try to publish block (succeeding)
        self.publish_block()

        self.verify_block()

    def test_committed_batches(self):
        '''
        Test that batches committed upon updating the chain head
        are not included in the next block.
        '''
        self.update_chain_head(None)

        self.update_chain_head(
            head=self.init_chain_head,
            committed=self.batches)

        new_batches = self.make_batches(batch_count=12)

        self.receive_batches(new_batches)

        self.publish_block()

        self.verify_block(new_batches)

    def test_uncommitted_batches(self):
        '''
        Test that batches uncommitted upon updating the chain head
        are included in the next block.
        '''
        self.update_chain_head(None)

        self.update_chain_head(
            head=self.init_chain_head,
            uncommitted=self.batches)

        self.publish_block()

        self.verify_block()

    def test_empty_pending_queue(self):
        '''
        Test that no block is published if the pending queue is empty
        '''
        # try to publish with no pending queue (failing)
        self.publish_block()

        self.assert_no_block_published()

        # receive batches, then try again (succeeding)
        self.receive_batches()

        self.publish_block()

        self.verify_block()

    def test_missing_dependencies(self):
        '''
        Test that no block is published with missing dependencies
        '''
        self.batches = self.make_batches(
            missing_deps=True)

        self.receive_batches()

        self.publish_block()

        self.assert_no_block_published()

    def test_batches_rejected_by_scheduler(self):
        '''
        Test that no block is published with
        batches rejected by the scheduler
        '''
        self.publisher = BlockPublisher(
            transaction_executor=MockTransactionExecutor(
                batch_execution_result=False),
            block_cache=self.block_tree_manager.block_cache,
            state_view_factory=self.state_view_factory,
            block_sender=self.block_sender,
            batch_sender=self.batch_sender,
            squash_handler=None,
            chain_head=self.block_tree_manager.chain_head,
            identity_signing_key=self.block_tree_manager.identity_signing_key,
            data_dir=None,
            config_dir=None)

        self.receive_batches()

        self.publish_block()

        self.assert_no_block_published()

    def test_max_block_size(self):
        '''
        Test that the block publisher obeys the block size limits
        '''
        # Create a publisher that has a state view
        # with a batch limit
        addr, value = CreateSetting(
            'sawtooth.publisher.max_batches_per_block', 1)
        self.state_view_factory = MockStateViewFactory(
            {addr: value})

        self.publisher = BlockPublisher(
            transaction_executor=MockTransactionExecutor(),
            block_cache=self.block_tree_manager.block_cache,
            state_view_factory=self.state_view_factory,
            block_sender=self.block_sender,
            batch_sender=self.batch_sender,
            squash_handler=None,
            chain_head=self.block_tree_manager.chain_head,
            identity_signing_key=self.block_tree_manager.identity_signing_key,
            data_dir=None,
            config_dir=None)

        self.assert_no_block_published()

        # receive all the batches, then publish blocks one batch at a time
        self.receive_batches()

        for i in range(self.batch_count):
            self.publish_block()
            self.assert_block_published()
            self.update_chain_head(BlockWrapper(self.result_block))
            self.verify_block([self.batches[i]])

    def test_duplicate_transactions(self):
        '''
        Test that batches containing duplicate transactions are discarded.
        '''
        # publish a block with a single batch
        self.batches = self.batches[1:2]
        self.receive_batches()
        self.publish_block()
        self.assert_block_published()
        self.update_chain_head(BlockWrapper(self.result_block))
        self.verify_block()

        # build a new set of batches with the same transactions in them
        self.batches = self.make_batches_with_duplicate_txn()
        self.receive_batches()
        self.publish_block()
        # block should be empty after the batch
        # with the duplicate transaction is dropped
        self.assert_no_block_published()

    # assertions

    def assert_block_published(self):
        self.assertIsNotNone(
            self.result_block,
            'Block should have been published')

    def assert_no_block_published(self):
        self.assertIsNone(
            self.result_block,
            'Block should not have been published')

    def assert_batch_in_block(self, batch):
        self.assertIn(
            batch,
            tuple(self.result_block.batches),
            'Batch not in block')

    def assert_batches_in_block(self, batches=None):
        if batches is None:
            batches = self.batches

        for batch in batches:
            self.assert_batch_in_block(batch)

    def assert_block_batch_count(self, batch_count=None):
        if batch_count is None:
            batch_count = self.batch_count

        self.assertEqual(
            len(self.result_block.batches),
            batch_count,
            'Wrong batch count in block')

    def verify_block(self, batches=None):
        if batches is None:
            batches = self.batches

        batch_count = None if batches is None else len(batches)

        self.assert_block_published()
        self.assert_batches_in_block(batches)
        self.assert_block_batch_count(batch_count)

        self.result_block = None

    # publisher functions

    def receive_batch(self, batch):
        self.publisher.on_batch_received(batch)

    def receive_batches(self, batches=None):
        if batches is None:
            batches = self.batches

        for batch in batches:
            self.receive_batch(batch)

    def publish_block(self):
        self.publisher.on_check_publish_block()
        self.result_block = self.block_sender.new_block
        self.block_sender.new_block = None

    def update_chain_head(self, head, committed=None, uncommitted=None):
        if head:
            self.block_tree_manager.block_store.update_chain([head])
        self.publisher.on_chain_updated(
            chain_head=head,
            committed_batches=committed,
            uncommitted_batches=uncommitted)

    # batches

    def make_batch(self, missing_deps=False):
        return self.block_tree_manager.generate_batch(
            missing_deps=missing_deps)

    def make_batches(self, batch_count=None, missing_deps=False):
        if batch_count is None:
            batch_count = self.batch_count

        return [self.make_batch(missing_deps=missing_deps)
                for _ in range(batch_count)]

    def make_batches_with_duplicate_txn(self):
        txns = [self.batches[0].transactions[0],
                self.block_tree_manager.generate_transaction("nonce")]
        return [self.block_tree_manager.generate_batch(txns=txns)]
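The publish_block wrapper above relies on the mock block sender exposing the last block it was asked to send through a new_block attribute. The real mock lives in the test_journal.mock module and is not reproduced here; the sketch below (hypothetical name, assumed signature) only illustrates the behavior these tests depend on.

# Illustrative sketch, not the actual test_journal.mock implementation.
class MockBlockSenderSketch:
    '''Captures the most recent block handed to send() so a test can
    read it back as result_block and then clear it.'''

    def __init__(self):
        self.new_block = None

    def send(self, block):
        # BlockPublisher calls this when it publishes a block; the test's
        # publish_block() wrapper then picks the block up from new_block.
        self.new_block = block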
class TestBlockPublisher():
    '''
    The block publisher has three main functions, and in these
    tests those functions are given the following wrappers for
    convenience:
        * on_batch_received -> receive_batches
        * on_chain_updated -> update_chain_head
        * on_check_publish_block -> publish_block

    After publishing a block, publish_block sends its block to the mock
    block sender, and that block is named result_block. This block is
    what is checked by the test assertions.

    The basic pattern for the publisher tests (with variations) is:
        0) make a list of batches (usually in setUp);
        1) receive the batches;
        2) publish a block;
        3) verify the block (checking that it contains the correct
           batches, or checking that it doesn't exist, or whatever).

    The publisher chain head might be updated several times in a test.
    '''

    def __init__(self):
        self.block_tree_manager = BlockTreeManager()
        self.block_sender = MockBlockSender()
        self.batch_sender = MockBatchSender()
        self.state_view_factory = MockStateViewFactory({})
        self.permission_verifier = MockPermissionVerifier()

        self.publisher = BlockPublisher(
            transaction_executor=MockTransactionExecutor(),
            block_cache=self.block_tree_manager.block_cache,
            state_view_factory=self.state_view_factory,
            settings_cache=SettingsCache(
                SettingsViewFactory(
                    self.block_tree_manager.state_view_factory),
            ),
            block_sender=self.block_sender,
            batch_sender=self.batch_sender,
            squash_handler=None,
            chain_head=self.block_tree_manager.chain_head,
            identity_signer=self.block_tree_manager.identity_signer,
            data_dir=None,
            config_dir=None,
            check_publish_block_frequency=0.1,
            batch_observers=[],
            permission_verifier=self.permission_verifier)

        self.init_chain_head = self.block_tree_manager.chain_head

        self.result_block = None

        # A list of batches is created at the beginning of each test.
        # The test assertions and the publisher function wrappers
        # take these batches as a default argument.
        self.batch_count = 8
        self.batches = self.make_batches()

    def verify_block(self, batches=None):
        if batches is None:
            batches = self.batches

        batch_count = None if batches is None else len(batches)

    # publisher functions

    def receive_batch(self, batch):
        self.publisher.on_batch_received(batch)

    def receive_batches(self, batches=None):
        if batches is None:
            batches = self.batches

        for batch in batches:
            self.receive_batch(batch)

    def publish_block(self):
        self.publisher.on_check_publish_block()
        self.result_block = self.block_sender.new_block
        self.block_sender.new_block = None

    def update_chain_head(self, head, committed=None, uncommitted=None):
        if head:
            self.block_tree_manager.block_store.update_chain([head])
        self.publisher.on_chain_updated(
            chain_head=head,
            committed_batches=committed,
            uncommitted_batches=uncommitted)

    # batches

    def make_batch(self, missing_deps=False, txn_count=2):
        return self.block_tree_manager.generate_batch(
            txn_count=txn_count,
            missing_deps=missing_deps)

    def make_batches(self, batch_count=None, missing_deps=False):
        if batch_count is None:
            batch_count = self.batch_count

        return [
            self.make_batch(missing_deps=missing_deps)
            for _ in range(batch_count)
        ]

    def make_batches_with_duplicate_txn(self):
        txns = [
            self.batches[0].transactions[0],
            self.block_tree_manager.generate_transaction("nonce")
        ]
        return [self.block_tree_manager.generate_batch(txns=txns)]
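This variant of the publisher also takes a permission_verifier, which it consults before accepting a batch into the pending queue. The mock comes from test_journal.mock and is not shown in this file; a minimal stand-in (hypothetical name, assumed method signature) would simply authorize every batch, so permissioning never interferes with the behavior under test.

# Illustrative sketch of the permission check these tests assume.
class PermissiveBatchVerifierSketch:
    def is_batch_signer_authorized(self, batch, state_root=None):
        # Allow every batch through; the publisher tests never exercise
        # the rejection path of the permission verifier.
        return True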
class TestBlockPublisher(unittest.TestCase):
    '''
    The block publisher has five main functions, and in these
    tests those functions are given the following wrappers for
    convenience:
        * on_batch_received -> receive_batches
        * on_chain_updated -> update_chain_head
        * initialize_block -> initialize_block
        * summarize_block -> summarize_block
        * finalize_block -> finalize_block

    Additionally, publish_block is provided to call both
    initialize_block and finalize_block.

    After finalizing a block, finalize_block sends its block to the mock
    block sender, and that block is named result_block. This block is
    what is checked by the test assertions.

    The basic pattern for the publisher tests (with variations) is:
        0) make a list of batches (usually in setUp);
        1) receive the batches;
        2) initialize a block;
        3) finalize a block;
        4) verify the block (checking that it contains the correct
           batches, or checking that it doesn't exist, etc.).
    '''

    @unittest.mock.patch('test_journal.mock.MockBatchInjectorFactory')
    def setUp(self, mock_batch_injector_factory):
        mock_batch_injector_factory.create_injectors.return_value = []

        self.block_tree_manager = BlockTreeManager()
        self.block_sender = MockBlockSender()
        self.batch_sender = MockBatchSender()
        self.state_view_factory = MockStateViewFactory({})
        self.permission_verifier = MockPermissionVerifier()

        self.publisher = BlockPublisher(
            block_manager=self.block_tree_manager.block_manager,
            transaction_executor=MockTransactionExecutor(),
            transaction_committed=(
                self.block_tree_manager.block_store.has_transaction),
            batch_committed=self.block_tree_manager.block_store.has_batch,
            state_view_factory=self.state_view_factory,
            block_sender=self.block_sender,
            batch_sender=self.batch_sender,
            chain_head=self.block_tree_manager.chain_head.block,
            identity_signer=self.block_tree_manager.identity_signer,
            data_dir=None,
            config_dir=None,
            batch_observers=[],
            permission_verifier=self.permission_verifier,
            batch_injector_factory=mock_batch_injector_factory)

        self.init_chain_head = self.block_tree_manager.chain_head

        self.result_block = None

        # A list of batches is created at the beginning of each test.
        # The test assertions and the publisher function wrappers
        # take these batches as a default argument.
        self.batch_count = 8
        self.batches = self.make_batches()

    def test_publish(self):
        '''
        Publish a block with several batches
        '''
        self.receive_batches()

        self.publish_block()

        self.verify_block()

    def test_receive_after_initialize(self):
        '''
        Receive batches after initialization
        '''
        self.initialize_block()

        self.receive_batches()

        self.finalize_block()

        self.verify_block()

    def test_summarize_block(self):
        '''
        Initialize a block and summarize it
        '''
        self.receive_batches()

        self.initialize_block()

        self.assertIsNotNone(self.summarize_block(),
                             'Expected block summary')

    def test_reject_double_initialization(self):
        '''
        Test that a candidate block cannot be initialized twice
        '''
        self.initialize_block()

        with self.assertRaises(
                BlockInProgress,
                msg='Second initialization should have been rejected'):
            self.initialize_block()

    def test_reject_finalize_without_initialize(self):
        '''
        Test that no block is published if the block hasn't been initialized
        '''
        self.receive_batches()

        with self.assertRaises(
                BlockNotInitialized,
                msg='Block should not be finalized'):
            self.finalize_block()

    def test_reject_duplicate_batches_from_receive(self):
        '''
        Test that duplicate batches from on_batch_received are rejected
        '''
        for _ in range(5):
            self.receive_batches()

        self.publish_block()

        self.verify_block()

    def test_reject_duplicate_batches_from_store(self):
        '''
        Test that duplicate batches from the block store are rejected
        '''
        self.update_chain_head(
            head=self.init_chain_head,
            uncommitted=self.batches)

        self.receive_batches()

        self.publish_block()

        self.verify_block()

    def test_committed_batches(self):
        '''
        Test that batches committed upon updating the chain head
        are not included in the next block.
        '''
        self.update_chain_head(
            head=self.init_chain_head,
            committed=self.batches)

        new_batches = self.make_batches(batch_count=12)

        self.receive_batches(new_batches)

        self.publish_block()

        self.verify_block(new_batches)

    def test_uncommitted_batches(self):
        '''
        Test that batches uncommitted upon updating the chain head
        are included in the next block.
        '''
        self.update_chain_head(
            head=self.init_chain_head,
            uncommitted=self.batches)

        self.publish_block()

        self.verify_block()

    def test_empty_pending_queue(self):
        '''
        Test that no block is published if the pending queue is empty
        '''
        # try to publish with no pending queue (failing)
        with self.assertRaises(
                BlockEmpty, msg='Block should not be published'):
            self.publish_block()

        self.assert_no_block_published()

        # receive batches, then try again (succeeding)
        self.receive_batches()

        self.finalize_block()

        self.verify_block()

    def test_missing_dependencies(self):
        '''
        Test that no block is published with missing dependencies
        '''
        self.batches = self.make_batches(
            missing_deps=True)

        self.receive_batches()

        # Block should be empty, since batches with missing deps aren't added
        with self.assertRaises(BlockEmpty, msg='Block should be empty'):
            self.publish_block()

        self.assert_no_block_published()

    @unittest.mock.patch('test_journal.mock.MockBatchInjectorFactory')
    def test_batches_rejected_by_scheduler(self, mock_batch_injector_factory):
        '''
        Test that no block is published with
        batches rejected by the scheduler
        '''
        mock_batch_injector_factory.create_injectors.return_value = []

        self.publisher = BlockPublisher(
            block_manager=self.block_tree_manager.block_manager,
            transaction_executor=MockTransactionExecutor(
                batch_execution_result=False),
            transaction_committed=(
                self.block_tree_manager.block_store.has_transaction),
            batch_committed=self.block_tree_manager.block_store.has_batch,
            state_view_factory=self.state_view_factory,
            block_sender=self.block_sender,
            batch_sender=self.batch_sender,
            chain_head=self.block_tree_manager.chain_head.block,
            identity_signer=self.block_tree_manager.identity_signer,
            data_dir=None,
            config_dir=None,
            batch_observers=[],
            permission_verifier=self.permission_verifier,
            batch_injector_factory=mock_batch_injector_factory)

        self.receive_batches()

        # Block should be empty since all batches are rejected
        with self.assertRaises(BlockEmpty, msg='Block should be empty'):
            self.publish_block()

        self.assert_no_block_published()

    @unittest.mock.patch('test_journal.mock.MockBatchInjectorFactory')
    def test_max_block_size(self, mock_batch_injector_factory):
        '''
        Test that the block publisher obeys the block size limits
        '''
        mock_batch_injector_factory.create_injectors.return_value = []

        # Create a publisher that has a state view
        # with a batch limit
        addr, value = CreateSetting(
            'sawtooth.publisher.max_batches_per_block', 1)
        self.state_view_factory = MockStateViewFactory(
            {addr: value})

        self.publisher = BlockPublisher(
            block_manager=self.block_tree_manager.block_manager,
            transaction_executor=MockTransactionExecutor(),
            transaction_committed=(
                self.block_tree_manager.block_store.has_transaction),
            batch_committed=self.block_tree_manager.block_store.has_batch,
            state_view_factory=self.state_view_factory,
            block_sender=self.block_sender,
            batch_sender=self.batch_sender,
            chain_head=self.block_tree_manager.chain_head.block,
            identity_signer=self.block_tree_manager.identity_signer,
            data_dir=None,
            config_dir=None,
            batch_observers=[],
            permission_verifier=self.permission_verifier,
            batch_injector_factory=mock_batch_injector_factory)

        self.assert_no_block_published()

        # receive all the batches, then publish blocks one batch at a time
        self.receive_batches()

        for i in range(self.batch_count):
            self.publish_block()
            self.assert_block_published()
            self.update_chain_head(BlockWrapper(self.result_block))
            self.verify_block([self.batches[i]])

    def test_duplicate_transactions(self):
        '''
        Test that batches containing duplicate transactions are discarded.
        '''
        # publish a block with a single batch
        self.batches = self.batches[1:2]
        self.receive_batches()
        self.publish_block()
        self.assert_block_published()
        self.update_chain_head(BlockWrapper(self.result_block))
        self.verify_block()

        # build a new set of batches with the same transactions in them
        self.batches = self.make_batches_with_duplicate_txn()
        self.receive_batches()
        # block should be empty after the batch
        # with the duplicate transaction is dropped
        with self.assertRaises(BlockEmpty, msg='Block should be empty'):
            self.publish_block()
        self.assert_no_block_published()

    def test_batch_injection_start_block(self):
        '''
        Test that the batch is injected at the beginning of the block.
        '''
        injected_batch = self.make_batch()

        self.publisher = BlockPublisher(
            block_manager=self.block_tree_manager.block_manager,
            transaction_executor=MockTransactionExecutor(),
            transaction_committed=(
                self.block_tree_manager.block_store.has_transaction),
            batch_committed=self.block_tree_manager.block_store.has_batch,
            state_view_factory=self.state_view_factory,
            block_sender=self.block_sender,
            batch_sender=self.batch_sender,
            chain_head=self.block_tree_manager.chain_head.block,
            identity_signer=self.block_tree_manager.identity_signer,
            data_dir=None,
            config_dir=None,
            permission_verifier=self.permission_verifier,
            batch_observers=[],
            batch_injector_factory=MockBatchInjectorFactory(injected_batch))

        self.receive_batches()

        self.publish_block()

        self.assert_batch_in_block(injected_batch)

    @unittest.mock.patch('test_journal.mock.MockBatchInjectorFactory')
    def test_validation_rules_reject_batches(self,
                                             mock_batch_injector_factory):
        """Test that a batch is not added to the block if it will violate the
        block validation rules.

        It does the following:
            - Sets the block_validation_rules to limit the number of 'test'
              transactions to 1
            - creates two batches, limited to 1 transaction each, and
              receives them
            - verifies that only the first batch was committed to the block
        """
        addr, value = CreateSetting(
            'sawtooth.validator.block_validation_rules', 'NofX:1,test')
        self.state_view_factory = MockStateViewFactory(
            {addr: value})

        mock_batch_injector_factory.create_injectors.return_value = []

        batch1 = self.make_batch(txn_count=1)
        batch2 = self.make_batch(txn_count=1)

        self.publisher = BlockPublisher(
            block_manager=self.block_tree_manager.block_manager,
            transaction_executor=MockTransactionExecutor(),
            transaction_committed=(
                self.block_tree_manager.block_store.has_transaction),
            batch_committed=self.block_tree_manager.block_store.has_batch,
            state_view_factory=self.state_view_factory,
            block_sender=self.block_sender,
            batch_sender=self.batch_sender,
            chain_head=self.block_tree_manager.chain_head.block,
            identity_signer=self.block_tree_manager.identity_signer,
            data_dir=None,
            config_dir=None,
            batch_observers=[],
            permission_verifier=self.permission_verifier,
            batch_injector_factory=mock_batch_injector_factory)

        self.receive_batches(batches=[batch1, batch2])

        self.publish_block()

        self.assert_block_batch_count(1)
        self.assert_batch_in_block(batch1)

    # assertions

    def assert_block_published(self):
        self.assertIsNotNone(
            self.result_block,
            'Block should have been published')

    def assert_no_block_published(self):
        self.assertIsNone(
            self.result_block,
            'Block should not have been published')

    def assert_batch_in_block(self, batch):
        self.assertIn(
            batch,
            tuple(self.result_block.batches),
            'Batch not in block')

    def assert_batches_in_block(self, batches=None):
        if batches is None:
            batches = self.batches

        for batch in batches:
            self.assert_batch_in_block(batch)

    def assert_block_batch_count(self, batch_count=None):
        if batch_count is None:
            batch_count = self.batch_count

        self.assertEqual(
            len(self.result_block.batches),
            batch_count,
            'Wrong batch count in block')

    def verify_block(self, batches=None):
        if batches is None:
            batches = self.batches

        batch_count = None if batches is None else len(batches)

        self.assert_block_published()
        self.assert_batches_in_block(batches)
        self.assert_block_batch_count(batch_count)

        self.result_block = None

    # publisher functions

    def receive_batch(self, batch):
        self.publisher.on_batch_received(batch)

    def receive_batches(self, batches=None):
        if batches is None:
            batches = self.batches

        for batch in batches:
            self.receive_batch(batch)

    def initialize_block(self):
        self.publisher.initialize_block(self.block_tree_manager.chain_head)

    def summarize_block(self):
        return self.publisher.summarize_block()

    def finalize_block(self):
        self.publisher.finalize_block("")
        self.result_block = self.block_sender.new_block
        self.block_sender.new_block = None

    def publish_block(self):
        self.initialize_block()
        self.finalize_block()

    def update_chain_head(self, head, committed=None, uncommitted=None):
        if head:
            self.block_tree_manager.block_store.update_chain([head])
        self.publisher.on_chain_updated(
            chain_head=head,
            committed_batches=committed,
            uncommitted_batches=uncommitted)

    # batches

    def make_batch(self, missing_deps=False, txn_count=2):
        return self.block_tree_manager.generate_batch(
            txn_count=txn_count,
            missing_deps=missing_deps)

    def make_batches(self, batch_count=None, missing_deps=False):
        if batch_count is None:
            batch_count = self.batch_count

        return [self.make_batch(missing_deps=missing_deps)
                for _ in range(batch_count)]

    def make_batches_with_duplicate_txn(self):
        txns = [self.batches[0].transactions[0],
                self.block_tree_manager.generate_transaction("nonce")]
        return [self.block_tree_manager.generate_batch(txns=txns)]
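test_max_block_size and test_validation_rules_reject_batches seed the mock state view with a CreateSetting helper that is defined elsewhere in the test package. The sketch below shows roughly what such a helper has to produce, assuming the standard Sawtooth settings addressing scheme (the '000000' namespace followed by four 16-character hash segments of the dot-separated key parts) and the Setting protobuf message; the function name and import path are illustrative, not the actual helper.

# Illustrative sketch of a CreateSetting-style helper; the real one is
# provided by the test_journal package.
import hashlib

from sawtooth_validator.protobuf.setting_pb2 import Setting  # assumed path

_SETTINGS_NAMESPACE = '000000'


def _hash_part(part):
    return hashlib.sha256(part.encode()).hexdigest()[:16]


def create_setting_sketch(key, value):
    # Address = namespace + one 16-char segment per dot-separated key part,
    # padded out to four parts, matching the settings transaction family.
    parts = key.split('.', 3)
    parts.extend([''] * (4 - len(parts)))
    address = _SETTINGS_NAMESPACE + ''.join(_hash_part(p) for p in parts)

    setting = Setting()
    setting.entries.add(key=key, value=str(value))

    # The (address, serialized-setting) pair is what MockStateViewFactory
    # serves back when the publisher reads its configuration.
    return address, setting.SerializeToString()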
class TestBlockValidator(unittest.TestCase):
    def setUp(self):
        self.state_view_factory = MockStateViewFactory()

        self.block_tree_manager = BlockTreeManager()
        self.root = self.block_tree_manager.chain_head

        self.block_validation_handler = self.BlockValidationHandler()

    # fork based tests

    def test_fork_simple(self):
        """
        Test a simple case of a new block extending the current root.
        """
        new_block = self.block_tree_manager.generate_block(
            previous_block=self.root,
            add_to_cache=True)

        self.validate_block(new_block)

        self.assert_valid_block(new_block)
        self.assert_new_block_committed()

    def test_good_fork_lower(self):
        """
        Test the case of a new block extending a valid chain that is not
        as long as the current chain.
        """
        # create a new valid chain 5 long from the current root
        chain, head = self.generate_chain_with_head(
            self.root, 5, {'add_to_store': True})

        self.block_tree_manager.set_chain_head(head)

        # generate candidate chain 3 long from the same root
        new_chain, new_head = self.generate_chain_with_head(
            self.root, 3, {'add_to_cache': True})

        self.validate_block(new_head)

        self.assert_valid_block(new_head)
        self.assert_new_block_not_committed()

    def test_good_fork_higher(self):
        """
        Test the case of a new block extending a valid chain that is
        longer than the current chain. (Similar to test_good_fork_lower,
        but uses a different code path when finding the common root.)
        """
        # create a new valid chain 5 long from the current root
        chain, head = self.generate_chain_with_head(
            self.root, 5, {'add_to_store': True})

        self.block_tree_manager.set_chain_head(head)

        # generate candidate chain 8 long built on the new head
        new_chain, new_head = self.generate_chain_with_head(
            head, 8, {'add_to_cache': True})

        self.validate_block(new_head)

        self.assert_valid_block(new_head)
        self.assert_new_block_committed()

    def test_fork_different_genesis(self):
        """
        Test the case where the new block is from a different genesis.
        """
        # create a new valid chain 5 long from the current root
        chain, head = self.generate_chain_with_head(
            self.root, 5, {'add_to_store': True})

        self.block_tree_manager.set_chain_head(head)

        # generate candidate chain 5 long from its own genesis
        new_chain, new_head = self.generate_chain_with_head(
            None, 5, {'add_to_cache': True})

        self.validate_block(new_head)

        self.assert_invalid_block(new_head)
        self.assert_new_block_not_committed()

    def test_fork_missing_predecessor(self):
        """
        Test the case where the new block is missing a predecessor.
        """
        # generate candidate chain 5 long off the current head
        chain, head = self.generate_chain_with_head(
            self.root, 5, {'add_to_cache': True})

        # remove one of the new blocks
        del self.block_tree_manager.block_cache[chain[1].identifier]

        self.validate_block(head)

        self.assert_invalid_block(head)
        self.assert_new_block_not_committed()

    def test_fork_invalid_predecessor(self):
        """
        Test the case where the new block has an invalid predecessor.
        """
        # generate candidate chain 5 long off the current head
        chain, head = self.generate_chain_with_head(
            self.root, 5, {'add_to_cache': True})

        # mark a predecessor as invalid
        chain[1].status = BlockStatus.Invalid

        self.validate_block(head)

        self.assert_invalid_block(head)
        self.assert_new_block_not_committed()

    def test_block_bad_consensus(self):
        """
        Test the case where the new block has invalid consensus.
        """
        chain, head = self.generate_chain_with_head(
            self.root, 5, {'add_to_store': True})

        new_block = self.block_tree_manager.generate_block(
            previous_block=head,
            add_to_cache=True,
            invalid_consensus=True)

        self.validate_block(new_block)

        self.assert_invalid_block(new_block)
        self.assert_new_block_not_committed()

    def test_block_bad_batch(self):
        """
        Test the case where the new block has a bad batch.
        """
        chain, head = self.generate_chain_with_head(
            self.root, 5, {'add_to_store': True})

        new_block = self.block_tree_manager.generate_block(
            previous_block=head,
            add_to_cache=True,
            invalid_batch=True)

        self.validate_block(new_block)

        self.assert_invalid_block(new_block)
        self.assert_new_block_not_committed()

    def test_block_missing_batch_dependency(self):
        """
        Test the case where the new block has a batch that is missing a
        dependency.
        """
        chain, head = self.generate_chain_with_head(
            self.root, 5, {'add_to_store': True})

        txn = self.block_tree_manager.generate_transaction(deps=["missing"])
        batch = self.block_tree_manager.generate_batch(txns=[txn])
        new_block = self.block_tree_manager.generate_block(
            previous_block=head,
            add_to_cache=True,
            invalid_batch=True,
            batches=[batch])

        self.validate_block(new_block)

        self.assert_invalid_block(new_block)
        self.assert_new_block_not_committed()

    def test_block_duplicate_batch(self):
        """
        Test the case where the new block has a batch that was already
        committed to the chain.
        """
        chain, head = self.generate_chain_with_head(
            self.root, 5, {'add_to_store': True})

        batch = self.block_tree_manager.generate_batch()
        new_block = self.block_tree_manager.generate_block(
            previous_block=head,
            add_to_cache=True,
            invalid_batch=True,
            batches=[batch])
        self.validate_block(new_block)

        new_block = self.block_tree_manager.generate_block(
            previous_block=head,
            add_to_cache=True,
            invalid_batch=True,
            batches=[batch])
        self.validate_block(new_block)

        self.assert_invalid_block(new_block)
        self.assert_new_block_not_committed()

    def test_block_duplicate_batch_in_block(self):
        """
        Test the case where the new block contains duplicate batches.
        """
        chain, head = self.generate_chain_with_head(
            self.root, 5, {'add_to_store': True})

        batch = self.block_tree_manager.generate_batch()
        new_block = self.block_tree_manager.generate_block(
            previous_block=head,
            add_to_cache=True,
            invalid_batch=True,
            batches=[batch, batch])

        self.validate_block(new_block)

        self.assert_invalid_block(new_block)
        self.assert_new_block_not_committed()

    def test_block_duplicate_transaction(self):
        """
        Test the case where the new block has a transaction that is
        already committed.
        """
        chain, head = self.generate_chain_with_head(
            self.root, 5, {'add_to_store': True})

        txn = self.block_tree_manager.generate_transaction()
        batch = self.block_tree_manager.generate_batch(txns=[txn])
        new_block = self.block_tree_manager.generate_block(
            previous_block=head,
            add_to_cache=True,
            invalid_batch=True,
            batches=[batch])
        self.validate_block(new_block)

        txn2 = self.block_tree_manager.generate_transaction()
        batch = self.block_tree_manager.generate_batch(txns=[txn, txn2])
        new_block = self.block_tree_manager.generate_block(
            previous_block=new_block,
            add_to_cache=True,
            invalid_batch=True,
            batches=[batch])
        self.validate_block(new_block)

        self.assert_invalid_block(new_block)
        self.assert_new_block_not_committed()

    def test_block_duplicate_transaction_in_batch(self):
        """
        Test the case where the new block has a batch that contains
        duplicate transactions.
        """
        chain, head = self.generate_chain_with_head(
            self.root, 5, {'add_to_store': True})

        txn = self.block_tree_manager.generate_transaction()
        batch = self.block_tree_manager.generate_batch(txns=[txn, txn])
        new_block = self.block_tree_manager.generate_block(
            previous_block=head,
            add_to_cache=True,
            invalid_batch=True,
            batches=[batch])

        self.validate_block(new_block)

        self.assert_invalid_block(new_block)
        self.assert_new_block_not_committed()

    # assertions

    def assert_valid_block(self, block):
        self.assertEqual(
            block.status, BlockStatus.Valid,
            "Block should be valid")

    def assert_invalid_block(self, block):
        self.assertEqual(
            block.status, BlockStatus.Invalid,
            "Block should be invalid")

    def assert_new_block_committed(self):
        self.assert_handler_has_result()
        self.assertTrue(
            self.block_validation_handler.result["commit_new_block"],
            "New block not committed, should be")

    def assert_new_block_not_committed(self):
        self.assert_handler_has_result()
        self.assertFalse(
            self.block_validation_handler.result["commit_new_block"],
            "New block committed, shouldn't be")

    def assert_handler_has_result(self):
        msg = "Validation handler doesn't have result"
        self.assertTrue(self.block_validation_handler.has_result(), msg)

    # block validation

    def validate_block(self, block):
        validator = self.create_block_validator(
            block,
            self.block_validation_handler.on_block_validated)
        validator.run()

    def create_block_validator(self, new_block, on_block_validated):
        return BlockValidator(
            consensus_module=mock_consensus,
            new_block=new_block,
            chain_head=self.block_tree_manager.chain_head,
            state_view_factory=self.state_view_factory,
            block_cache=self.block_tree_manager.block_cache,
            done_cb=on_block_validated,
            executor=MockTransactionExecutor(batch_execution_result=None),
            squash_handler=None,
            identity_signing_key=self.block_tree_manager.identity_signing_key,
            data_dir=None,
            config_dir=None)

    class BlockValidationHandler(object):
        def __init__(self):
            self.result = None

        def on_block_validated(self, commit_new_block, result):
            result["commit_new_block"] = commit_new_block
            self.result = result

        def has_result(self):
            return self.result is not None

    # block tree manager interface

    def generate_chain_with_head(self, root_block, num_blocks, params=None):
        chain = self.block_tree_manager.generate_chain(
            root_block, num_blocks, params)

        head = chain[-1]

        return chain, head