def make_db_and_store(size=3):
    """Build a dict database, a mock block store, and the merkle roots
    produced by committing ``size`` successive states.

    Keys are 70-character zero-padded hex strings starting with '1'; the
    value stored at each key is derived from the key index and the root
    number, so every (root, key) pair gets a unique value.

    Returns:
        tuple: (database, store, roots)
    """
    database = DictDatabase()
    store = MockBlockStore(size=0)
    roots = []
    merkle = MerkleDatabase(database)

    # All keys that will ever be used, shared across roots.
    keys = [format(n, 'x').zfill(70) for n in range(1, size + 1)]

    for root_num in range(1, size + 1):
        # State for this root: the first `root_num` keys, each with a
        # value unique to (root number, key index).
        state = {
            keys[idx]: str(root_num + (2 * idx)).encode()
            for idx in range(root_num)
        }
        new_root = merkle.update(state, virtual=False)
        roots.append(new_root)
        store.add_block(str(root_num), new_root)

    return database, store, roots
def _squash(state_root, context_ids, persist, clean_up):
    """Merge the state from the given contexts (and every base context
    reachable from them) into a single merkle update on state_root.

    Args:
        state_root (str): merkle root to apply the merged state on.
        context_ids (list of str): the contexts to squash.
        persist (bool): if True, write the result to the database and
            save the state deltas; otherwise compute a virtual root.
        clean_up (bool): if True, delete every context visited.

    Returns:
        str: the resulting state hash (state_root itself when there is
        nothing to update or delete).
    """
    contexts_in_chain = deque()
    contexts_in_chain.extend(context_ids)
    context_ids_already_searched = []
    context_ids_already_searched.extend(context_ids)

    # There is only one exit condition and that is when all the
    # contexts have been accessed once.
    updates = dict()
    deletes = set()
    while contexts_in_chain:
        current_c_id = contexts_in_chain.popleft()
        current_context = self._contexts[current_c_id]
        # Freeze the context before reading so its state can't change
        # mid-squash.
        if not current_context.is_read_only():
            current_context.make_read_only()

        addresses_w_values = current_context.get_all_if_set()
        for add, val in addresses_w_values.items():
            # Since we are moving backwards through the graph of
            # contexts, only update if the address hasn't been set
            # or deleted
            if add not in updates and add not in deletes:
                updates[add] = val

        addresses_w_values = current_context.get_all_if_deleted()
        for add, _ in addresses_w_values.items():
            # Since we are moving backwards through the graph of
            # contexts, only add to deletes if the address hasn't been
            # previously deleted or set in the graph
            if add not in updates and add not in deletes:
                deletes.add(add)

        # Enqueue unvisited base contexts for the breadth-first walk.
        for c_id in current_context.base_contexts:
            if c_id not in context_ids_already_searched:
                contexts_in_chain.append(c_id)
                context_ids_already_searched.append(c_id)

    tree = MerkleDatabase(self._database, state_root)

    # filter the delete list to just those items in the tree
    deletes = [addr for addr in deletes if addr in tree]

    if not updates and not deletes:
        # Nothing changed; the original root is still valid.
        return state_root

    virtual = not persist
    state_hash = tree.update(updates, deletes, virtual=virtual)
    if persist:
        # save the state changes to the state_delta_store
        changes = [StateChange(address=addr,
                               value=value,
                               type=StateChange.SET)
                   for addr, value in updates.items()] +\
                  [StateChange(address=addr,
                               type=StateChange.DELETE)
                   for addr in deletes]
        self._state_delta_store.save_state_deltas(state_hash, changes)
    if clean_up:
        self.delete_contexts(context_ids_already_searched)
    return state_hash
def _squash(state_root, context_ids, persist):
    """Merge the state of the given contexts into one merkle update.

    Args:
        state_root (str): merkle root to apply the merged state on.
        context_ids (list of str): contexts whose state is merged.
        persist (bool): if True, write the new root to the database and
            delete the squashed contexts; otherwise compute virtually.

    Returns:
        str: the resulting state hash (state_root when nothing changed).

    Raises:
        SquashException: if the same address was written in more than
            one of the given contexts.
    """
    tree = MerkleDatabase(self._database, state_root)
    updates = dict()
    for c_id in context_ids:
        context = self._contexts[c_id]
        # Duplicate addresses across contexts are a programming error.
        for add in context.get_state().keys():
            if add in updates:
                raise SquashException(
                    "Duplicate address {} in context {}".format(
                        add, c_id))

        # Resolve each address's future; None results are writes that
        # never materialized and are skipped.
        effective_updates = {}
        for k, val_fut in context.get_state().items():
            value = val_fut.result()
            if value is not None:
                effective_updates[k] = value

        updates.update(effective_updates)

    if len(updates) == 0:
        return state_root

    virtual = not persist
    state_hash = tree.update(updates, virtual=virtual)
    if persist:
        # clean up all contexts that are involved in being squashed.
        base_c_ids = []
        for c_id in context_ids:
            base_c_ids += self._contexts[c_id].base_context_ids
        all_context_ids = base_c_ids + context_ids
        self.delete_context(all_context_ids)
    return state_hash
def setUp(self):
    """Create a fresh 10 MB LMDB-backed merkle tree in a temp directory."""
    self._temp_dir = tempfile.mkdtemp()
    db_path = os.path.join(self._temp_dir, 'test_identity_view.lmdb')
    self._database = NativeLmdbDatabase(db_path, _size=10 * 1024 * 1024)
    self._tree = MerkleDatabase(self._database)
def setUp(self):
    """Create a 120 MB LMDB file in a temp directory and wrap it in a
    merkle trie for the tests."""
    self.dir = tempfile.mkdtemp()
    self.file = os.path.join(self.dir, 'merkle.lmdb')
    size_bytes = 120 * 1024 * 1024
    self.lmdb = NativeLmdbDatabase(self.file, _size=size_bytes)
    self.trie = MerkleDatabase(self.lmdb)
def create_view(self, state_root_hash):
    """Create a StateView for the given state root hash.

    Returns:
        StateView: state view locked to the given root hash.
    """
    merkle_db = MerkleDatabase(self._database, state_root_hash)
    return StateView(merkle_db)
def setUp(self):
    """Open a no-lock LMDB database in a fresh temp directory and build
    a merkle trie over it."""
    self.dir = tempfile.mkdtemp()
    self.file = os.path.join(self.dir, 'merkle.lmdb')
    self.lmdb = lmdb_nolock_database.LMDBNoLockDatabase(self.file, 'n')
    self.trie = MerkleDatabase(self.lmdb)
def make_db_and_store(size=3, start='a'):
    """
    Creates and returns three related objects for testing:
        * database - dict database with evolving state
        * store - blocks with root hashes corresponding to that state
        * roots - list of root hashes used in order

    With defaults, the values at the three roots look like this:
        * 0 - {'a': b'1'}
        * 1 - {'a': b'2', 'b': b'4'}
        * 2 - {'a': b'3', 'b': b'5', 'c': b'7'}
    """
    database = DictDatabase()
    store = MockBlockStore(size=0)
    roots = []
    merkle = MerkleDatabase(database)
    data = {}

    for block_num in range(size):
        # Bump every existing value by one, then add one new key.
        data = {k: str(int(v) + 1).encode() for k, v in data.items()}
        new_key = _increment_key(start, block_num)
        data[new_key] = str(block_num * size + 1).encode()

        root = merkle.update(data, virtual=False)
        roots.append(root)
        store.add_block(str(block_num), root)

    return database, store, roots
def get(self, context_id, address_list):
    """Get the values associated with list of addresses, for a specific
    context referenced by context_id.

    Args:
        context_id (str): the return value of create_context, referencing
            a particular context.
        address_list (list): a list of address strs

    Returns:
        values_list (list): a list of (address, value) tuples, ordered
            to match address_list.

    Raises:
        AuthorizationException: Raised when an address in address_list is
            not authorized either by not being in the inputs for the
            txn associated with this context, or it is under a namespace
            but the characters that are under the namespace are not valid
            address characters.
    """
    # Unknown contexts resolve to no values rather than an error.
    if context_id not in self._contexts:
        return []
    for add in address_list:
        if not self.address_is_valid(address=add):
            raise AuthorizationException(address=add)

    context = self._contexts[context_id]

    # Split the request into addresses the context already holds and
    # those that must be resolved from base contexts / the merkle tree.
    addresses_in_ctx = [add for add in address_list if add in context]
    addresses_not_in_ctx = list(set(address_list) - set(addresses_in_ctx))

    values = context.get(addresses_in_ctx)
    values_list = list(zip(addresses_in_ctx, values))
    if addresses_not_in_ctx:
        # Validate the addresses that won't be validated by a direct get on
        # the context.
        for address in addresses_not_in_ctx:
            context.validate_read(address)
        address_values, reads = self._find_address_values_in_chain(
            base_contexts=[context_id],
            addresses_to_find=addresses_not_in_ctx)

        values_list.extend(address_values)

        if reads:
            # Anything still unresolved falls back to the merkle tree at
            # this context's root.
            tree = MerkleDatabase(self._database, context.merkle_root)
            add_values = []
            for add in reads:
                value = None
                try:
                    value = tree.get(add)
                except KeyError:
                    # The address is not in the radix tree/merkle tree
                    pass
                add_values.append((add, value))
            values_list.extend(add_values)

    # Restore the caller's requested ordering.
    values_list.sort(key=lambda x: address_list.index(x[0]))

    return values_list
def run(self):
    """Worker loop: consume (context_id, state_hash, address_list) work
    items from the queue until the shutdown sentinel arrives, resolve
    each address against the merkle tree at the given state hash, and
    publish the (address, value) results.
    """
    while True:
        work_item = self._addresses.get(block=True)
        if work_item is _SHUTDOWN_SENTINEL:
            break
        c_id, state_hash, address_list = work_item
        tree = MerkleDatabase(self._database, state_hash)
        resolved = []
        for address in address_list:
            try:
                value = tree.get(address)
            except KeyError:
                # Address not present at this state root.
                value = None
            resolved.append((address, value))
        self._inflated_addresses.put((c_id, resolved))
def compute_state_hashes_wo_scheduler(self):
    """Creates a state hash from the state updates from each txn in a
    valid batch.

    Returns state_hashes (list of str): The merkle roots from state
        changes in 1 or more blocks in the yaml file.
    """
    tree = MerkleDatabase(database=DictDatabase())

    state_hashes = []
    updates = {}
    for batch in self._batches:
        b_id = batch.header_signature
        result = self._batch_results[b_id]
        # Only valid batches contribute state updates.
        if result.is_valid:
            for txn in batch.transactions:
                txn_id = txn.header_signature
                _, address_values = self._txn_execution[txn_id]
                batch_updates = {}
                for pair in address_values:
                    batch_updates.update({a: pair[a] for a in pair.keys()})
                # since this is entirely serial, any overwrite
                # of an address is expected and desirable.
                updates.update(batch_updates)
        # This handles yaml files that have state roots in them
        if result.state_hash is not None:
            s_h = tree.update(set_items=updates, virtual=False)
            tree.set_merkle_root(merkle_root=s_h)
            state_hashes.append(s_h)
    # No explicit roots in the yaml: produce a single virtual root.
    if not state_hashes:
        state_hashes.append(tree.update(set_items=updates))
    return state_hashes
def __init__(self, database, block_store):
    """Initialize the client-state-get handler with a merkle tree built
    over the given database and the provided block store."""
    state_tree = MerkleDatabase(database)
    super().__init__(
        client_pb2.ClientStateGetRequest,
        client_pb2.ClientStateGetResponse,
        validator_pb2.Message.CLIENT_STATE_GET_RESPONSE,
        tree=state_tree,
        block_store=block_store)
def __init__(self): self.dir = '/tmp/sawtooth' # tempfile.mkdtemp() self.file = os.path.join(self.dir, 'merkle.lmdb') self.lmdb = lmdb_nolock_database.LMDBNoLockDatabase(self.file, 'n') self.trie = MerkleDatabase(self.lmdb)
def setUp(self):
    """Create a 10 MB LMDB database (with merkle index configuration)
    in a temp directory and build a merkle tree over it."""
    self._temp_dir = tempfile.mkdtemp()
    db_path = os.path.join(self._temp_dir, 'test_identity_view.lmdb')
    self._database = NativeLmdbDatabase(
        db_path,
        indexes=MerkleDatabase.create_index_configuration(),
        _size=10 * 1024 * 1024)
    self._tree = MerkleDatabase(self._database)
def setUp(self):
    """Create a 120 MB LMDB database (with merkle index configuration)
    in a temp directory and build a merkle trie over it."""
    self.dir = tempfile.mkdtemp()
    self.file = os.path.join(self.dir, 'merkle.lmdb')
    self.lmdb = NativeLmdbDatabase(
        self.file,
        indexes=MerkleDatabase.create_index_configuration(),
        _size=120 * 1024 * 1024)
    self.trie = MerkleDatabase(self.lmdb)
def test_squash(self): """Tests that squashing a context based on state from other contexts will result in the same merkle hash as updating the merkle tree with the same data. Notes: Set up the context Test: 1) Make set calls on several of the addresses. 2) Squash the context to get a new state hash. 3) Apply all of the aggregate sets from all of the contexts, to another database with a merkle tree. 4) Assert that the state hashes are the same. 5) Assert that the state deltas have been stored """ # 1) context_id = self._setup_context() self.context_manager.set(context_id, [{ self._create_address(a): v } for a, v in [('yyyy', b'2'), ('tttt', b'4')]]) # 2) squash = self.context_manager.get_squash_handler() resulting_state_hash = squash(self.first_state_hash, [context_id], persist=True, clean_up=True) # 3) final_state_to_update = { self._create_address(a): v for a, v in [('llaa', b'1'), ('aall', b'2'), ( 'nnnn', b'3'), ('zzzz', b'9'), ('yyyy', b'2'), ( 'tttt', b'4'), ('qqqq', b'13'), ('oooo', b'25'), ( 'oozz', b'26'), ('zzoo', b'27'), ('ppoo', b'28'), ('aeio', b'29')] } test_merkle_tree = MerkleDatabase(self.database_results) test_resulting_state_hash = test_merkle_tree.update( final_state_to_update, virtual=False) # 4) self.assertEqual(resulting_state_hash, test_resulting_state_hash) state_changes = self.state_delta_store.get_state_deltas( resulting_state_hash) # 5) for addr, value in final_state_to_update.items(): expected_state_change = StateChange(address=addr, value=value, type=StateChange.SET) self.assertTrue(expected_state_change in state_changes)
def commit_context(self, context_id_list, virtual):
    """ Only used in a test ---
    Commits the state from the contexts referred to in context_id_list
    to the merkle tree.

    Args:
        context_id_list (list of str): The context ids with state to
            commit to the merkle tree.
        virtual (bool): True if the data in contexts shouldn't be
            written to the merkle tree, but just return a merkle root.

    Returns:
        state_hash (str): the new state hash after the context_id_list
            has been committed

    Raises:
        CommitException: if any context id is unknown, the contexts do
            not all share the same merkle root, or an address appears in
            more than one context.
    """
    # NOTE(review): the `virtual` argument is accepted but never
    # forwarded -- tree.update below is always called with
    # virtual=False. Confirm whether that is intentional.
    if any([c_id not in self._contexts for c_id in context_id_list]):
        raise CommitException("Context Id not in contexts")
    first_id = context_id_list[0]

    # All contexts must start from the same root to be mergeable.
    if not all([self._contexts[first_id].merkle_root ==
                self._contexts[c_id].merkle_root
                for c_id in context_id_list]):
        raise CommitException(
            "MerkleRoots not all equal, yet asking to merge")

    merkle_root = self._contexts[first_id].merkle_root
    tree = MerkleDatabase(self._database, merkle_root=merkle_root)
    updates = dict()
    for c_id in context_id_list:
        context = self._contexts[c_id]
        for add in context.get_state().keys():
            if add in updates:
                raise CommitException(
                    "Duplicate address {} in context {}".format(add, c_id))

        # Resolve each future; None results are skipped.
        effective_updates = {}
        for k, val_fut in context.get_state().items():
            value = val_fut.result()
            if value is not None:
                effective_updates[k] = value

        updates.update(effective_updates)

    state_hash = tree.update(updates, virtual=False)
    # clean up all contexts that are involved in being squashed.
    base_c_ids = []
    for c_id in context_id_list:
        base_c_ids += self._contexts[c_id].base_context_ids
    all_context_ids = base_c_ids + context_id_list
    self.delete_context(all_context_ids)
    return state_hash
def test_empty_batch_file_should_produce_block(
        self, mock_scheduler_complete
):
    """
    In this case, the genesis batch, even with an empty list of batches,
    should produce a genesis block.
    Also:
     - the genesis.batch file should be deleted
     - the block_chain_id file should be created and populated
    """
    genesis_file = self._with_empty_batch_file()
    block_store = self.make_block_store()
    block_manager = BlockManager()
    block_manager.add_commit_store(block_store)

    # Real LMDB-backed state so the genesis process has a first root.
    state_database = NativeLmdbDatabase(
        os.path.join(self._temp_dir, 'test_genesis.lmdb'),
        indexes=MerkleDatabase.create_index_configuration(),
        _size=10 * 1024 * 1024)
    merkle_db = MerkleDatabase(state_database)

    # Everything else is mocked out; only state and block storage are real.
    ctx_mgr = Mock(name='ContextManager')
    ctx_mgr.get_squash_handler.return_value = Mock()
    ctx_mgr.get_first_root.return_value = merkle_db.get_merkle_root()

    txn_executor = Mock(name='txn_executor')
    completer = Mock('completer')
    completer.add_block = Mock('add_block')

    genesis_ctrl = GenesisController(
        context_manager=ctx_mgr,
        transaction_executor=txn_executor,
        completer=completer,
        block_store=block_store,
        state_view_factory=StateViewFactory(state_database),
        identity_signer=self._signer,
        block_manager=block_manager,
        data_dir=self._temp_dir,
        config_dir=self._temp_dir,
        chain_id_manager=ChainIdManager(self._temp_dir),
        batch_sender=Mock('batch_sender'),
        receipt_store=MagicMock())

    on_done_fn = Mock(return_value='')
    genesis_ctrl.start(on_done_fn)

    self.assertEqual(False, os.path.exists(genesis_file))
    self.assertEqual(True, block_store.chain_head is not None)
    self.assertEqual(1, on_done_fn.call_count)
    self.assertEqual(1, completer.add_block.call_count)
    self.assertEqual(block_store.chain_head.identifier,
                     self._read_block_chain_id())
def setUp(self):
    """Seed a merkle tree with three settings entries and remember the
    resulting root hash for the tests."""
    database = DictDatabase()
    state_view_factory = StateViewFactory(database)
    self._config_view_factory = ConfigViewFactory(state_view_factory)

    # (setting name, raw value) pairs to preload into state.
    seed_settings = [
        ('my.setting', '10'),
        ('my.setting.list', '10,11,12'),
        ('my.other.list', '13;14;15'),
    ]
    initial_state = {
        TestConfigView._address(name):
            TestConfigView._setting_entry(name, value)
        for name, value in seed_settings
    }

    merkle_db = MerkleDatabase(database)
    self._current_root_hash = merkle_db.update(initial_state, virtual=False)
def run(self):
    """Worker loop: pull (context_id, state_hash, address_list) items
    from the queue forever, resolve each address against the merkle
    tree at that state hash, and publish the (address, value) pairs."""
    while True:
        c_id, state_hash, address_list = self._addresses.get(block=True)
        tree = MerkleDatabase(self._database, state_hash)
        resolved = []
        for address in address_list:
            try:
                value = tree.get(address)
            except KeyError:
                # Address not present at this state root.
                value = None
            resolved.append((address, value))
        self._inflated_addresses.put((c_id, resolved))
def test_squash(self): """Tests that squashing a context based on state from other contexts will result in the same merkle hash as updating the merkle tree with the same data. Notes: Set up the context Test: 1) Make set calls on several of the addresses. 2) Squash the context to get a new state hash. 3) Apply all of the aggregate sets from all of the contexts, to another database with a merkle tree. 4) Assert that the state hashes are the same. 5) Assert that the state deltas have been stored """ # 1) context_id = self._setup_context() self.context_manager.set(context_id, [{'bbbb': b'2'}, {'eeee': b'4'}]) # 2) squash = self.context_manager.get_squash_handler() resulting_state_hash = squash(self.first_state_hash, [context_id], persist=True) # 3) final_state_to_update = { 'aaaa': b'25', 'bbbb': b'2', 'cccc': b'27', 'dddd': b'28', 'eeee': b'4' } test_merkle_tree = MerkleDatabase(self.database_results) test_resulting_state_hash = test_merkle_tree.update( final_state_to_update, virtual=False) # 4) self.assertEqual(resulting_state_hash, test_resulting_state_hash) state_changes = self.state_delta_store.get_state_deltas( resulting_state_hash) # 5) for addr, value in final_state_to_update.items(): expected_state_change = StateChange(address=addr, value=value, type=StateChange.SET) self.assertTrue(expected_state_change in state_changes)
def _recompute_state_hash(state_root, context=None):
    """Recompute a virtual state hash on top of state_root from the
    updates/deletes captured in `context` (DAG support only).

    Args:
        state_root (str): merkle root to recompute from.
        context (dict): expected to carry 'updates' and 'deletes' keys;
            if None or malformed the recompute fails and None is
            returned.

    Returns:
        str or None: the recomputed state hash, or None on failure.
    """
    # for DAG only - recompute state
    state_hash = None
    try:
        tree = MerkleDatabase(self._database, state_root)
        state_hash = tree.update(
            context['updates'], context['deletes'], virtual=True)
        LOGGER.debug('_recompute_state_hash: STATE=%s->%s\n',
                     state_root[:8], state_hash[:8])
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; any other failure (bad root, missing
        # context keys, context=None) is logged and reported as None.
        LOGGER.debug('_recompute_state_hash: BAD STATE=%s\n',
                     state_root[:8])
    return state_hash
def commit_context(self, context_id_list, virtual):
    """ Part of the interface to the Executor

    Commits the state of the given contexts to the merkle tree.

    Args:
        context_id_list (list of str): the contexts to commit; they
            must all share the same merkle root.
        virtual (bool): passed through to tree.update; when True the
            new root is computed without writing to the database.

    Returns:
        state_hash (str): the new state hash after the context_id_list
            has been committed

    Raises:
        CommitException: if any context id is unknown, the contexts do
            not all share the same merkle root, or an address appears in
            more than one context.
    """
    if any([c_id not in self._contexts for c_id in context_id_list]):
        raise CommitException("Context Id not in contexts")
    first_id = context_id_list[0]

    # All contexts must start from the same root to be mergeable.
    if not all([self._contexts[first_id].merkle_root ==
                self._contexts[c_id].merkle_root
                for c_id in context_id_list]):
        raise CommitException(
            "MerkleRoots not all equal, yet asking to merge")

    merkle_root = self._contexts[first_id].merkle_root
    tree = MerkleDatabase(self._database, merkle_root)
    merged_updates = {}
    for c_id in context_id_list:
        # Remove the context from the registry under the lock; the
        # local reference keeps it readable below.
        with self._shared_lock:
            context = self._contexts[c_id]
            del self._contexts[c_id]
        for k in context.get_writable_address_value_dict().keys():
            if k in merged_updates:
                raise CommitException(
                    "Duplicate address {} in context {}".format(k, c_id))
        merged_updates.update(context.get_writable_address_value_dict())

    new_root = merkle_root
    # Resolve each future; None results are skipped.
    add_value_dict = {}
    for k, val_fut in merged_updates.items():
        value = val_fut.result()
        if value is not None:
            add_value_dict[k] = value
    new_root = tree.update(set_items=add_value_dict, virtual=virtual)

    return new_root
def create_view(self, state_root_hash=None):
    """Create a StateView for the given state root hash.

    Args:
        state_root_hash (str): The state root hash of the state view
            to return. If None, the view is over the Merkle database's
            default root.

    Returns:
        StateView: state view locked to the given root hash.
    """
    tree = MerkleDatabase(self._database)
    if state_root_hash is None:
        return StateView(tree)
    # Re-anchor the tree at the requested root before wrapping it.
    tree.set_merkle_root(state_root_hash)
    return StateView(tree)
def setUp(self):
    """Seed an LMDB-backed merkle tree with three settings entries and
    remember the resulting root hash for the tests."""
    self._temp_dir = tempfile.mkdtemp()
    database = NativeLmdbDatabase(
        os.path.join(self._temp_dir, 'test_config_view.lmdb'),
        _size=10 * 1024 * 1024)
    state_view_factory = StateViewFactory(database)
    self._settings_view_factory = SettingsViewFactory(state_view_factory)

    # (setting name, raw value) pairs to preload into state.
    seed_settings = [
        ('my.setting', '10'),
        ('my.setting.list', '10,11,12'),
        ('my.other.list', '13;14;15'),
    ]
    initial_state = {
        TestSettingsView._address(name):
            TestSettingsView._setting_entry(name, value)
        for name, value in seed_settings
    }

    merkle_db = MerkleDatabase(database)
    self._current_root_hash = merkle_db.update(initial_state, virtual=False)
def _squash(state_root, context_ids):
    """Merge the state of the given contexts and persist it as a new
    merkle root on top of state_root.

    Args:
        state_root (str): merkle root to apply the merged state on.
        context_ids (list of str): contexts whose state is merged.

    Returns:
        str: the new state hash.

    Raises:
        SquashException: if the same address was written in more than
            one of the given contexts.
    """
    tree = MerkleDatabase(self._database, state_root)
    updates = dict()
    for c_id in context_ids:
        # Look up the context under the shared lock.
        with self._shared_lock:
            context = self._contexts[c_id]
        # Duplicate addresses across contexts are a programming error.
        for add in context.get_address_value_dict().keys():
            if add in updates:
                raise SquashException(
                    "Duplicate address {} in context {}".format(
                        add, c_id))
        # Resolve each address's future result before merging.
        updates.update({k: v.result()
                        for k, v in
                        context.get_address_value_dict().items()})

    state_hash = tree.update(updates, virtual=False)
    return state_hash
def test_empty_batch_file_should_produce_block(self,
                                               mock_scheduler_complete):
    """
    In this case, the genesis batch, even with an empty list of batches,
    should produce a genesis block.
    Also:
     - the genesis.batch file should be deleted
     - the block_chain_id file should be created and populated
    """
    genesis_file = self._with_empty_batch_file()
    block_store = self.make_block_store()

    # Real in-memory state so the genesis process has a first root.
    state_database = DictDatabase()
    merkle_db = MerkleDatabase(state_database)

    # Everything else is mocked out; only state and block storage are real.
    ctx_mgr = Mock(name='ContextManager')
    ctx_mgr.get_squash_handler.return_value = Mock()
    ctx_mgr.get_first_root.return_value = merkle_db.get_merkle_root()

    txn_executor = Mock(name='txn_executor')
    completer = Mock('completer')
    completer.add_block = Mock('add_block')

    genesis_ctrl = GenesisController(
        ctx_mgr,
        txn_executor,
        completer,
        block_store,
        StateViewFactory(state_database),
        self._signer,
        data_dir=self._temp_dir,
        config_dir=self._temp_dir,
        chain_id_manager=ChainIdManager(self._temp_dir),
        batch_sender=Mock('batch_sender'))

    on_done_fn = Mock(return_value='')
    genesis_ctrl.start(on_done_fn)

    self.assertEqual(False, os.path.exists(genesis_file))
    self.assertEqual(True, block_store.chain_head is not None)
    self.assertEqual(1, on_done_fn.call_count)
    self.assertEqual(1, completer.add_block.call_count)
    self.assertEqual(block_store.chain_head.identifier,
                     self._read_block_chain_id())
def _squash(state_root, context_ids, persist, clean_up):
    """Walk the context graph backwards from context_ids, merge every
    set address into one update, and apply it to the merkle tree at
    state_root.

    Args:
        state_root (str): merkle root to apply the merged state on.
        context_ids (list of str): starting contexts for the walk.
        persist (bool): if True, write the new root to the database and
            record the state deltas; otherwise compute a virtual root.
        clean_up (bool): if True, delete every context visited.

    Returns:
        str: the resulting state hash (state_root when nothing was set).
    """
    to_visit = deque(context_ids)
    visited = list(context_ids)

    # Visit every reachable context exactly once; the walk terminates
    # when the queue drains.
    updates = {}
    while to_visit:
        ctx = self._contexts[to_visit.popleft()]
        if not ctx.is_read_only():
            ctx.make_read_only()

        for address, value in ctx.get_all_if_set().items():
            # Moving backwards through the graph, so the first value
            # seen for an address (the most recent write) wins.
            if address not in updates:
                updates[address] = value

        for base_id in ctx.base_contexts:
            if base_id not in visited:
                to_visit.append(base_id)
                visited.append(base_id)

    if not updates:
        return state_root

    tree = MerkleDatabase(self._database, state_root)
    state_hash = tree.update(updates, virtual=not persist)
    if persist:
        # save the state changes to the state_delta_store
        changes = [StateChange(address=addr,
                               value=value,
                               type=StateChange.SET)
                   for addr, value in updates.items()]
        self._state_delta_store.save_state_deltas(state_hash, changes)
    if clean_up:
        self.delete_contexts(visited)
    return state_hash
def _check_merkle(state_root, context=''):
    """Debug helper (for testing): log whether a hard-coded test address
    exists in the merkle tree at state_root.

    Args:
        state_root (str): merkle root to inspect.
        context (str): free-form label included in the log lines.
    """
    # check state for testing
    try:
        tree = MerkleDatabase(self._database, state_root)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt are not swallowed.
        LOGGER.debug('_CHECK: BAD STATE=%s ROOT %s\n',
                     state_root[:8], context)
        return
    try:
        tree._get_by_addr(
            "449095bc5d9deba00a635d8db93c9deeb043416204f494b9f07862e9445559f0185109"
        )
        LOGGER.debug('_CHECK: ADDRESS YES CHECK STATE=%s %s\n',
                     state_root[:8], context)
    except Exception:
        # Address lookup failed -- treat as "not present".
        LOGGER.debug('_CHECK: ADDRESS NO CHECK STATE=%s %s\n',
                     state_root[:8], context)
def compute_state_hashes_wo_scheduler(self, base_dir):
    """Creates a state hash from the state updates from each txn in a
    valid batch.

    Args:
        base_dir (str): directory in which the scratch LMDB database
            is created.

    Returns state_hashes (list of str): The merkle roots from state
        changes in 1 or more blocks in the yaml file.
    """
    database = NativeLmdbDatabase(
        os.path.join(base_dir, 'compute_state_hashes_wo_scheduler.lmdb'),
        indexes=MerkleDatabase.create_index_configuration(),
        _size=10 * 1024 * 1024)
    tree = MerkleDatabase(database=database)

    state_hashes = []
    updates = {}
    for batch in self._batches:
        b_id = batch.header_signature
        result = self._batch_results[b_id]
        # Only valid batches contribute state updates.
        if result.is_valid:
            for txn in batch.transactions:
                txn_id = txn.header_signature
                _, address_values, deletes = self._txn_execution[txn_id]
                batch_updates = {}
                for pair in address_values:
                    batch_updates.update({a: pair[a] for a in pair.keys()})
                # since this is entirely serial, any overwrite
                # of an address is expected and desirable.
                updates.update(batch_updates)
                # Deletes from this txn remove pending updates.
                for address in deletes:
                    if address in updates:
                        del updates[address]
        # This handles yaml files that have state roots in them
        if result.state_hash is not None:
            s_h = tree.update(set_items=updates, virtual=False)
            tree.set_merkle_root(merkle_root=s_h)
            state_hashes.append(s_h)
    # No explicit roots in the yaml: produce a single virtual root.
    if not state_hashes:
        state_hashes.append(tree.update(set_items=updates))
    return state_hashes