def _squash(state_root, context_ids, persist, clean_up):
    """Squash the given contexts (and all of their base contexts) onto
    the merkle tree rooted at state_root, returning the new root hash.

    Walks the context graph breadth-first from context_ids back through
    each context's bases, collecting the effective set/delete operations,
    then applies them to the tree in a single update.
    """
    to_visit = deque(context_ids)
    visited = list(context_ids)

    # The walk terminates once every reachable context has been
    # processed exactly once.
    updates = {}
    deletes = set()
    while to_visit:
        ctx_id = to_visit.popleft()
        ctx = self._contexts[ctx_id]
        if not ctx.is_read_only():
            ctx.make_read_only()

        for addr, value in ctx.get_all_if_set().items():
            # Moving backwards through the graph, the first context to
            # claim an address wins: record only unclaimed addresses.
            if addr not in updates and addr not in deletes:
                updates[addr] = value

        for addr in ctx.get_all_if_deleted():
            # The same precedence rule applies to deletions.
            if addr not in updates and addr not in deletes:
                deletes.add(addr)

        for base_id in ctx.base_contexts:
            if base_id not in visited:
                to_visit.append(base_id)
                visited.append(base_id)

    tree = MerkleDatabase(self._database, state_root)

    # Only attempt to delete addresses that actually exist in the tree.
    deletes = [addr for addr in deletes if addr in tree]

    if not updates and not deletes:
        return state_root

    state_hash = tree.update(updates, deletes, virtual=not persist)
    if persist:
        # save the state changes to the state_delta_store
        changes = [StateChange(address=addr,
                               value=value,
                               type=StateChange.SET)
                   for addr, value in updates.items()]
        changes.extend(StateChange(address=addr,
                                   type=StateChange.DELETE)
                       for addr in deletes)
        self._state_delta_store.save_state_deltas(state_hash, changes)
    if clean_up:
        self.delete_contexts(visited)
    return state_hash
def test_add_subscriber(self):
    """Test adding a subscriber who has no known blocks.

    This scenario is valid for subscribers who have never connected and
    would need to receive all deltas since the genesis block.

    On registration, the subscriber should receive one event, comprised
    of the state changes for the genesis block.
    """
    mock_service = Mock()
    block_tree_manager = BlockTreeManager()
    delta_store = StateDeltaStore(DictDatabase())

    delta_processor = StateDeltaProcessor(
        service=mock_service,
        state_delta_store=delta_store,
        block_store=block_tree_manager.block_store)

    genesis_changes = [
        StateChange(address='deadbeef0000000',
                    value='my_genesis_value'.encode(),
                    type=StateChange.SET),
        StateChange(address='a14ea01',
                    value='some other state value'.encode(),
                    type=StateChange.SET),
    ]
    delta_store.save_state_deltas(
        block_tree_manager.chain_head.state_root_hash, genesis_changes)

    # Register with no known blocks, filtering on the 'deadbeef' prefix.
    delta_processor.add_subscriber('test_conn_id', [], ['deadbeef'])

    self.assertEqual(['test_conn_id'], delta_processor.subscriber_ids)

    # The subscriber catches up and receives the events from the chain
    # head.  Here that is a single event, as the head is genesis; only
    # the change matching the prefix filter is included.
    chain_head = block_tree_manager.chain_head
    expected_event = StateDeltaEvent(
        block_id=chain_head.identifier,
        block_num=chain_head.block_num,
        state_root_hash=chain_head.state_root_hash,
        previous_block_id=chain_head.previous_block_id,
        state_changes=[genesis_changes[0]])
    mock_service.send.assert_called_with(
        validator_pb2.Message.STATE_DELTA_EVENT,
        expected_event.SerializeToString(),
        connection_id='test_conn_id')
def test_state_store_get_and_set(self):
    """Tests that state changes are correctly written to and read back
    from a StateDeltaStore.

    Saves a list of state change values under a root hash, reads them
    back, and verifies the round trip preserved the data.
    """
    delta_store = StateDeltaStore(DictDatabase())

    changes = [StateChange(address='a100000' + str(i),
                           value=str(i).encode(),
                           type=StateChange.SET)
               for i in range(0, 10)]

    delta_store.save_state_deltas('my_state_root_hash', changes)

    stored_changes = delta_store.get_state_deltas('my_state_root_hash')

    # The result is a list-like repeated field; materialize it as a
    # real list so it compares equal to the original.
    self.assertEqual(changes, list(stored_changes))
def test_publish_deltas(self):
    """Tests that a subscriber filtering on an address prefix receives
    only the changes in an event that match.
    """
    mock_service = Mock()
    block_tree_manager = BlockTreeManager()
    delta_store = StateDeltaStore(DictDatabase())

    delta_processor = StateDeltaProcessor(
        service=mock_service,
        state_delta_store=delta_store,
        block_store=block_tree_manager.block_store)

    delta_processor.add_subscriber(
        'test_conn_id',
        [block_tree_manager.chain_head.identifier],
        ['deadbeef'])

    next_block = block_tree_manager.generate_block()

    # State added during context squash for our block; only the first
    # change matches the subscriber's prefix filter.
    matching_change = StateChange(
        address='deadbeef01',
        value='my_state_Value'.encode(),
        type=StateChange.SET)
    delta_store.save_state_deltas(
        next_block.header.state_root_hash,
        [matching_change,
         StateChange(address='a14ea01',
                     value='some other state value'.encode(),
                     type=StateChange.SET)])

    # Publish deltas for that block to the subscribers.
    delta_processor.publish_deltas(next_block)

    mock_service.send.assert_called_with(
        validator_pb2.Message.STATE_DELTA_EVENT,
        StateDeltaEvent(
            block_id=next_block.identifier,
            block_num=next_block.header.block_num,
            state_root_hash=next_block.header.state_root_hash,
            previous_block_id=next_block.header.previous_block_id,
            state_changes=[matching_change]).SerializeToString(),
        connection_id='test_conn_id')
def test_get_events_ignore_bad_blocks(self):
    """Tests that the GetStateDeltaEventsHandler will return a response
    containing only the events for blocks that exist.
    """
    block_tree_manager = BlockTreeManager()
    delta_store = StateDeltaStore(DictDatabase())

    genesis_change = StateChange(
        address='deadbeef0000000',
        value='my_genesis_value'.encode(),
        type=StateChange.SET)
    delta_store.save_state_deltas(
        block_tree_manager.chain_head.state_root_hash,
        [genesis_change,
         StateChange(address='a14ea01',
                     value='some other state value'.encode(),
                     type=StateChange.SET)])

    handler = GetStateDeltaEventsHandler(
        block_tree_manager.block_store, delta_store)

    # Request the chain head plus a block id that does not exist.
    request = GetStateDeltaEventsRequest(
        block_ids=[block_tree_manager.chain_head.identifier,
                   'somebadblockid'],
        address_prefixes=['deadbeef']).SerializeToString()

    response = handler.handle('test_conn_id', request)
    self.assertEqual(HandlerStatus.RETURN, response.status)
    self.assertEqual(GetStateDeltaEventsResponse.OK,
                     response.message_out.status)

    # Only the existing block yields an event, filtered by the
    # 'deadbeef' prefix; the bad block id is silently ignored.
    chain_head = block_tree_manager.chain_head
    self.assertEqual(
        [StateDeltaEvent(block_id=chain_head.identifier,
                         block_num=chain_head.block_num,
                         state_root_hash=chain_head.state_root_hash,
                         previous_block_id=chain_head.previous_block_id,
                         state_changes=[genesis_change])],
        list(response.message_out.events))
def test_publish_deltas_subscriber_matches_no_addresses(self):
    """Given a subscriber whose prefix filters don't match any addresses
    in the current state delta, it should still receive an event with
    the block change information.
    """
    mock_service = Mock()
    block_tree_manager = BlockTreeManager()
    delta_store = StateDeltaStore(DictDatabase())

    delta_processor = StateDeltaProcessor(
        service=mock_service,
        state_delta_store=delta_store,
        block_store=block_tree_manager.block_store)

    delta_processor.add_subscriber(
        'settings_conn_id',
        [block_tree_manager.chain_head.identifier],
        ['000000'])

    next_block = block_tree_manager.generate_block()

    # State added during context squash for our block; neither address
    # matches the subscriber's '000000' prefix filter.
    delta_store.save_state_deltas(
        next_block.header.state_root_hash,
        [StateChange(address='deadbeef01',
                     value='my_state_Value'.encode(),
                     type=StateChange.SET),
         StateChange(address='a14ea01',
                     value='some other state value'.encode(),
                     type=StateChange.SET)])

    # Publish deltas for that block to the subscribers.
    delta_processor.publish_deltas(next_block)

    # The event still arrives, but with an empty change list.
    # NOTE(review): unlike the other publish tests, the expected event
    # omits previous_block_id -- confirm this is intentional.
    mock_service.send.assert_called_with(
        validator_pb2.Message.STATE_DELTA_EVENT,
        StateDeltaEvent(
            block_id=next_block.identifier,
            block_num=next_block.header.block_num,
            state_root_hash=next_block.header.state_root_hash,
            state_changes=[]).SerializeToString(),
        connection_id='settings_conn_id')
def test_squash(self):
    """Tests that squashing a context based on state from other
    contexts will result in the same merkle hash as updating the
    merkle tree with the same data.

    Notes:
        Set up the context

        Test:
            1) Make set calls on several of the addresses.
            2) Squash the context to get a new state hash.
            3) Apply all of the aggregate sets from all of the
               contexts, to another database with a merkle tree.
            4) Assert that the state hashes are the same.
            5) Assert that the state deltas have been stored
    """
    # 1) write two additional addresses into the context
    context_id = self._setup_context()
    self.context_manager.set(
        context_id,
        [{self._create_address(key): value}
         for key, value in [('yyyy', b'2'), ('tttt', b'4')]])

    # 2) squash, persisting the result and cleaning up the contexts
    squash = self.context_manager.get_squash_handler()
    resulting_state_hash = squash(self.first_state_hash, [context_id],
                                  persist=True, clean_up=True)

    # 3) apply the same aggregate writes directly to a fresh tree
    aggregate_writes = [
        ('llaa', b'1'), ('aall', b'2'), ('nnnn', b'3'), ('zzzz', b'9'),
        ('yyyy', b'2'), ('tttt', b'4'), ('qqqq', b'13'),
        ('oooo', b'25'), ('oozz', b'26'), ('zzoo', b'27'),
        ('ppoo', b'28'), ('aeio', b'29')]
    final_state_to_update = {self._create_address(key): value
                             for key, value in aggregate_writes}

    test_merkle_tree = MerkleDatabase(self.database_results)
    test_resulting_state_hash = test_merkle_tree.update(
        final_state_to_update, virtual=False)

    # 4) both routes must produce the same root hash
    self.assertEqual(resulting_state_hash, test_resulting_state_hash)

    # 5) every write must have been recorded as a SET state delta
    state_changes = self.state_delta_store.get_state_deltas(
        resulting_state_hash)
    for addr, value in final_state_to_update.items():
        expected_state_change = StateChange(
            address=addr, value=value, type=StateChange.SET)
        self.assertIn(expected_state_change, state_changes)
def test_squash_no_updates(self):
    """Tests that squashing a context that has no state updates will
    return the starting state root hash.

    Notes:
        Set up the context

        Test:
            1) Squash the context.
            2) Assert that the state hash is the same as the starting
               hash.
            3) Assert that the state deltas have not been overwritten
    """
    # Pre-populate the delta store so step 3 can verify the squash of
    # an empty context did not overwrite the existing deltas.
    self.state_delta_store.save_state_deltas(
        self.first_state_hash,
        [StateChange(address='aaa', value=b'xyz',
                     type=StateChange.SET)])

    context_id = self.context_manager.create_context(
        state_hash=self.first_state_hash,
        base_contexts=[],
        inputs=[],
        outputs=[])
    # 1)
    squash = self.context_manager.get_squash_handler()
    resulting_state_hash = squash(self.first_state_hash, [context_id],
                                  persist=True, clean_up=True)
    # 2)
    self.assertIsNotNone(resulting_state_hash)
    # assertEqual (not the deprecated assertEquals alias)
    self.assertEqual(resulting_state_hash, self.first_state_hash)
    # 3)
    changes = self.state_delta_store.get_state_deltas(
        resulting_state_hash)

    self.assertEqual(
        [StateChange(address='aaa', value=b'xyz',
                     type=StateChange.SET)],
        [c for c in changes])
def test_squash(self):
    """Tests that squashing a context based on state from other
    contexts will result in the same merkle hash as updating the
    merkle tree with the same data.

    Notes:
        Set up the context

        Test:
            1) Make set calls on several of the addresses.
            2) Squash the context to get a new state hash.
            3) Apply all of the aggregate sets from all of the
               contexts, to another database with a merkle tree.
            4) Assert that the state hashes are the same.
            5) Assert that the state deltas have been stored
    """
    # 1) write two more addresses into the context
    context_id = self._setup_context()
    self.context_manager.set(context_id,
                             [{'bbbb': b'2'}, {'eeee': b'4'}])

    # 2) squash, persisting the result
    squash = self.context_manager.get_squash_handler()
    resulting_state_hash = squash(
        self.first_state_hash, [context_id], persist=True)

    # 3) apply the same aggregate writes directly to a fresh tree
    final_state_to_update = {'aaaa': b'25',
                             'bbbb': b'2',
                             'cccc': b'27',
                             'dddd': b'28',
                             'eeee': b'4'}

    test_merkle_tree = MerkleDatabase(self.database_results)
    test_resulting_state_hash = test_merkle_tree.update(
        final_state_to_update, virtual=False)

    # 4) both routes must produce the same root hash
    self.assertEqual(resulting_state_hash, test_resulting_state_hash)

    # 5) every write must have been recorded as a SET state delta
    state_changes = self.state_delta_store.get_state_deltas(
        resulting_state_hash)
    for addr, value in final_state_to_update.items():
        self.assertIn(
            StateChange(address=addr, value=value,
                        type=StateChange.SET),
            state_changes)
def _squash(state_root, context_ids, persist, clean_up):
    """Squash the given contexts (and all of their base contexts) onto
    the merkle tree rooted at state_root, returning the new root hash.

    Args:
        state_root (str): the state root hash the tree starts from.
        context_ids (list): the context ids to squash; their base
            contexts are traversed as well.
        persist (bool): when True, write the tree update to the
            database and record the changes in the state delta store.
        clean_up (bool): when True, delete every context that was
            visited during the squash.

    Returns:
        str: the resulting state root hash, or the original
            state_root if no context held any updates.
    """
    contexts_in_chain = deque()
    contexts_in_chain.extend(context_ids)
    context_ids_already_searched = []
    context_ids_already_searched.extend(context_ids)

    # There is only one exit condition and that is when all the
    # contexts have been accessed once.
    updates = dict()
    while contexts_in_chain:
        current_c_id = contexts_in_chain.popleft()
        current_context = self._contexts[current_c_id]
        if not current_context.is_read_only():
            current_context.make_read_only()

        addresses_w_values = current_context.get_all_if_set()
        for add, val in addresses_w_values.items():
            # Since we are moving backwards through the graph of
            # contexts, only update if the address hasn't been set
            if add not in updates:
                updates[add] = val

        for c_id in current_context.base_contexts:
            if c_id not in context_ids_already_searched:
                contexts_in_chain.append(c_id)
                context_ids_already_searched.append(c_id)

    # Nothing to apply: the starting root is still valid.
    if not updates:
        return state_root

    tree = MerkleDatabase(self._database, state_root)
    state_hash = tree.update(updates, virtual=not persist)
    if persist:
        # save the state changes to the state_delta_store
        changes = [StateChange(address=addr,
                               value=value,
                               type=StateChange.SET)
                   for addr, value in updates.items()]
        self._state_delta_store.save_state_deltas(state_hash, changes)
    if clean_up:
        self.delete_contexts(context_ids_already_searched)
    return state_hash
def test_receipt_store_get_and_set(self):
    """Tests that we correctly get and set state changes to a
    ReceiptStore.

    This test sets a list of receipts and then gets them back,
    ensuring that the data is the same.
    """
    receipt_store = TransactionReceiptStore(DictDatabase())

    # Build ten receipts, each carrying ten state changes, events and
    # data items.
    receipts = []
    for _ in range(10):
        state_changes = []
        events = []
        data = []
        for j in range(10):
            string = str(j)
            byte = string.encode()
            state_changes.append(StateChange(
                address='a100000' + string,
                value=byte,
                type=StateChange.SET))
            events.append(Event(
                event_type="test",
                data=byte,
                attributes=[Event.Attribute(key=string,
                                            value=string)]))
            data.append(TransactionReceipt.Data(
                data_type="test", data=byte))

        receipts.append(TransactionReceipt(
            state_changes=state_changes, events=events, data=data))

    # Store each receipt keyed by its index.
    for i, receipt in enumerate(receipts):
        receipt_store.put(str(i), receipt)

    # Read each receipt back and verify the round trip preserved all
    # fields.
    for i, receipt in enumerate(receipts):
        stored_receipt = receipt_store.get(str(i))

        self.assertEqual(stored_receipt.state_changes,
                         receipt.state_changes)
        self.assertEqual(stored_receipt.events, receipt.events)
        self.assertEqual(stored_receipt.data, receipt.data)
def _squash(state_root, context_ids, persist):
    """Squash the given contexts onto the merkle tree rooted at
    state_root, returning the new root hash.

    Raises:
        SquashException: if two contexts being squashed wrote to the
            same address.
    """
    tree = MerkleDatabase(self._database, state_root)

    updates = {}
    for ctx_id in context_ids:
        state = self._contexts[ctx_id].get_state()

        # No two contexts in a single squash may touch the same
        # address.
        for address in state:
            if address in updates:
                raise SquashException(
                    "Duplicate address {} in context {}".format(
                        address, ctx_id))

        # The state values are futures; resolve them, keeping only the
        # addresses that were actually set.
        for address, value_future in state.items():
            value = value_future.result()
            if value is not None:
                updates[address] = value

    # Nothing was set anywhere: the starting root is still valid.
    if not updates:
        return state_root

    state_hash = tree.update(updates, virtual=not persist)
    if persist:
        # save the state changes to the state_delta_store
        changes = [StateChange(address=addr,
                               value=value,
                               type=StateChange.SET)
                   for addr, value in updates.items()]
        self._state_delta_store.save_state_deltas(state_hash, changes)

    # clean up all contexts that are involved in being squashed.
    base_ids = []
    for ctx_id in context_ids:
        base_ids += self._contexts[ctx_id].base_context_ids
    self.delete_context(base_ids + context_ids)
    return state_hash