def compute_state_hashes_wo_scheduler(self):
    """Creates a state hash from the state updates from each txn in a
    valid batch.

    Returns state_hashes (list of str): The merkle roots from state
        changes in 1 or more blocks in the yaml file.
    """
    tree = MerkleDatabase(database=DictDatabase())
    state_hashes = []
    updates = {}
    for batch in self._batches:
        b_id = batch.header_signature
        result = self._batch_results[b_id]
        if result.is_valid:
            for txn in batch.transactions:
                txn_id = txn.header_signature
                _, address_values = self._txn_execution[txn_id]
                batch_updates = {}
                for pair in address_values:
                    # pair is already a dict; update() copies it
                    # directly (the old {a: pair[a] for a in
                    # pair.keys()} rebuilt it needlessly)
                    batch_updates.update(pair)
                # since this is entirely serial, any overwrite
                # of an address is expected and desirable.
                updates.update(batch_updates)
        # This handles yaml files that have state roots in them
        if result.state_hash is not None:
            s_h = tree.update(set_items=updates, virtual=False)
            tree.set_merkle_root(merkle_root=s_h)
            state_hashes.append(s_h)
    # idiomatic emptiness test, consistent with the newer revision of
    # this method elsewhere in the project
    if not state_hashes:
        state_hashes.append(tree.update(set_items=updates))
    return state_hashes
def compute_state_hashes_wo_scheduler(self):
    """Compute merkle roots serially, without using a scheduler.

    Returns:
        list of str: the merkle roots from state changes in 1 or more
        blocks in the yaml file.
    """
    tree = MerkleDatabase(database=DictDatabase())
    hashes = []
    accumulated = {}
    for batch in self._batches:
        outcome = self._batch_results[batch.header_signature]
        if outcome.is_valid:
            for txn in batch.transactions:
                _, address_values = self._txn_execution[
                    txn.header_signature]
                combined = {}
                for pair in address_values:
                    combined.update({addr: pair[addr] for addr in pair})
                # Execution is entirely serial, so a later write to an
                # address deliberately replaces any earlier one.
                accumulated.update(combined)
        # yaml files may carry an explicit state root for a batch
        if outcome.state_hash is not None:
            root = tree.update(set_items=accumulated, virtual=False)
            tree.set_merkle_root(merkle_root=root)
            hashes.append(root)
    if not hashes:
        # no explicit roots in the yaml: emit one virtual root
        hashes.append(tree.update(set_items=accumulated))
    return hashes
def _squash(state_root, context_ids, persist):
    """Merge the resolved writes of the given contexts into the merkle
    tree rooted at state_root.

    Args:
        state_root (str): merkle root the contexts are based on.
        context_ids (list of str): contexts whose state is merged.
        persist (bool): if True, write the new root to the database and
            delete the involved contexts.

    Returns:
        str: the new state hash, or state_root if nothing was written.

    Raises:
        SquashException: if two contexts wrote the same address.
    """
    tree = MerkleDatabase(self._database, state_root)
    updates = {}
    for c_id in context_ids:
        context = self._contexts[c_id]
        # fetch once: get_state() was previously called twice per
        # context (once for the duplicate check, once for the values)
        context_state = context.get_state()
        for add in context_state.keys():
            if add in updates:
                raise SquashException(
                    "Duplicate address {} in context {}".format(
                        add, c_id))
        # only addresses whose futures resolved to a real value are
        # committed; None means "no value"
        effective_updates = {}
        for k, val_fut in context_state.items():
            value = val_fut.result()
            if value is not None:
                effective_updates[k] = value
        updates.update(effective_updates)
    if not updates:
        # NOTE(review): this early return skips the persist-time
        # context cleanup below — confirm contexts are reclaimed
        # elsewhere when nothing was written.
        return state_root
    virtual = not persist
    state_hash = tree.update(updates, virtual=virtual)
    if persist:
        # clean up all contexts that are involved in being squashed.
        base_c_ids = []
        for c_id in context_ids:
            base_c_ids += self._contexts[c_id].base_context_ids
        all_context_ids = base_c_ids + context_ids
        self.delete_context(all_context_ids)
    return state_hash
def _squash(state_root, context_ids, persist, clean_up):
    """Merge the writes and deletes of a chain of contexts into the
    merkle tree rooted at state_root.

    Args:
        state_root (str): merkle root the contexts are based on.
        context_ids (list of str): contexts to squash; their base
            contexts are followed transitively.
        persist (bool): if True, write the new root to the database
            and record state deltas.
        clean_up (bool): if True, delete every context visited.

    Returns:
        str: the new state hash, or state_root when nothing changed.
    """
    contexts_in_chain = deque()
    contexts_in_chain.extend(context_ids)
    context_ids_already_searched = []
    context_ids_already_searched.extend(context_ids)

    # There is only one exit condition and that is when all the
    # contexts have been accessed once.
    updates = {}
    deletes = set()
    while contexts_in_chain:
        current_c_id = contexts_in_chain.popleft()
        current_context = self._contexts[current_c_id]
        if not current_context.is_read_only():
            current_context.make_read_only()

        addresses_w_values = current_context.get_all_if_set()
        for add, val in addresses_w_values.items():
            # Since we are moving backwards through the graph of
            # contexts, only update if the address hasn't been set
            # or deleted
            if add not in updates and add not in deletes:
                updates[add] = val

        # Only the addresses are needed here; iterate the keys
        # directly instead of .items() with a discarded value.
        for add in current_context.get_all_if_deleted():
            # Since we are moving backwards through the graph of
            # contexts, only add to deletes if the address hasn't been
            # previously deleted or set in the graph
            if add not in updates and add not in deletes:
                deletes.add(add)

        for c_id in current_context.base_contexts:
            if c_id not in context_ids_already_searched:
                contexts_in_chain.append(c_id)
                context_ids_already_searched.append(c_id)

    tree = MerkleDatabase(self._database, state_root)

    # filter the delete list to just those items in the tree
    deletes = [addr for addr in deletes if addr in tree]

    if not updates and not deletes:
        # Nothing changed; still fall through so clean_up runs (the
        # previous early return leaked every visited context).
        state_hash = state_root
    else:
        virtual = not persist
        state_hash = tree.update(updates, deletes, virtual=virtual)
        if persist:
            # save the state changes to the state_delta_store
            changes = [StateChange(address=addr, value=value,
                                   type=StateChange.SET)
                       for addr, value in updates.items()] + \
                      [StateChange(address=addr,
                                   type=StateChange.DELETE)
                       for addr in deletes]
            self._state_delta_store.save_state_deltas(
                state_hash, changes)

    if clean_up:
        self.delete_contexts(context_ids_already_searched)
    return state_hash
def make_db_and_store(size=3, start='a'):
    """Creates and returns three related objects for testing:
        * database - dict database with evolving state
        * store - blocks with root hashes corresponding to that state
        * roots - list of root hashes used in order

    With defaults, the values at the three roots look like this:
        * 0 - {'a': b'1'}
        * 1 - {'a': b'2', 'b': b'4'}
        * 2 - {'a': b'3', 'b': b'5', 'c': b'7'}
    """
    database = DictDatabase()
    store = MockBlockStore(size=0)
    merkle = MerkleDatabase(database)
    roots = []

    state = {}
    for block_num in range(size):
        # bump every value that already exists by one
        state = {key: str(int(val) + 1).encode()
                 for key, val in state.items()}
        # introduce one brand-new key for this block
        state[_increment_key(start, block_num)] = \
            str(block_num * size + 1).encode()

        root = merkle.update(state, virtual=False)
        roots.append(root)
        store.add_block(str(block_num), root)

    return database, store, roots
def test_squash(self):
    """Tests that squashing a context based on state from other
    contexts will result in the same merkle hash as updating the
    merkle tree with the same data.

    Notes:
        Set up the context

        Test:
            1) Make set calls on several of the addresses.
            2) Squash the context to get a new state hash.
            3) Apply all of the aggregate sets from all of the
               contexts, to another database with a merkle tree.
            4) Assert that the state hashes are the same.
            5) Assert that the state deltas have been stored
    """
    # 1) write two addresses into a freshly prepared context
    context_id = self._setup_context()
    set_pairs = [('yyyy', b'2'), ('tttt', b'4')]
    self.context_manager.set(
        context_id,
        [{self._create_address(name): val} for name, val in set_pairs])

    # 2) squash, persisting the result
    squash = self.context_manager.get_squash_handler()
    resulting_state_hash = squash(
        self.first_state_hash, [context_id],
        persist=True, clean_up=True)

    # 3) replay the aggregate writes against an independent tree
    all_pairs = [('llaa', b'1'), ('aall', b'2'), ('nnnn', b'3'),
                 ('zzzz', b'9'), ('yyyy', b'2'), ('tttt', b'4'),
                 ('qqqq', b'13'), ('oooo', b'25'), ('oozz', b'26'),
                 ('zzoo', b'27'), ('ppoo', b'28'), ('aeio', b'29')]
    final_state_to_update = {
        self._create_address(name): val for name, val in all_pairs}
    reference_tree = MerkleDatabase(self.database_results)
    expected_hash = reference_tree.update(
        final_state_to_update, virtual=False)

    # 4) both roots must agree
    self.assertEqual(resulting_state_hash, expected_hash)

    state_changes = self.state_delta_store.get_state_deltas(
        resulting_state_hash)

    # 5) every write must have been recorded as a SET delta
    for addr, value in final_state_to_update.items():
        expected_state_change = StateChange(
            address=addr, value=value, type=StateChange.SET)
        self.assertTrue(expected_state_change in state_changes)
def make_db_and_store(size=3, start='a'):
    """Creates and returns three related objects for testing:
        * database - dict database with evolving state
        * store - blocks with root hashes corresponding to that state
        * roots - list of root hashes used in order

    With defaults, the values at the three roots look like this:
        * 0 - {'a': b'1'}
        * 1 - {'a': b'2', 'b': b'4'}
        * 2 - {'a': b'3', 'b': b'5', 'c': b'7'}
    """
    database = DictDatabase()
    # one statement per line: the original fused these two with a
    # semicolon, which PEP 8 discourages
    store = MockBlockStore(size=0)
    roots = []

    merkle = MerkleDatabase(database)
    data = {}

    for i in range(size):
        # increment every value previously written
        for k, v in data.items():
            data[k] = str(int(v) + 1).encode()
        # introduce one new key for this root
        data[_increment_key(start, i)] = str(i * size + 1).encode()

        root = merkle.update(data, virtual=False)
        roots.append(root)
        store.add_block(str(i), root)

    return database, store, roots
def make_db_and_store(size=3):
    """Creates and returns three related objects for testing:
        * database - dict database with evolving state
        * store - blocks with root hashes corresponding to that state
        * roots - list of root hashes used in order

    Root i (1-based) holds i keys; each value is derived from the key
    index and the root number.
    """
    database = DictDatabase()
    store = MockBlockStore(size=0)
    roots = []

    merkle = MerkleDatabase(database)

    # Create all the keys that will be used. Keys are zero-padded hex
    # strings starting with '1'.
    keys = [format(i, 'x').zfill(70) for i in range(1, size + 1)]

    for i in range(1, size + 1):
        # Construct the state for this root from scratch (a dead
        # `data = {}` before the loop was removed; it was always
        # overwritten here)
        data = {}
        for key_idx in range(i):
            key = keys[key_idx]
            # Calculate unique values based on the key and root
            val = i + (2 * key_idx)
            data[key] = str(val).encode()

        root = merkle.update(data, virtual=False)
        roots.append(root)
        store.add_block(str(i), root)

    return database, store, roots
def compute_state_hashes_wo_scheduler(self, base_dir):
    """Compute merkle roots serially, without using a scheduler.

    Args:
        base_dir (str): directory in which the backing LMDB file is
            created.

    Returns:
        list of str: merkle roots for state changes in 1 or more
        blocks of the yaml file.
    """
    database = NativeLmdbDatabase(
        os.path.join(base_dir, 'compute_state_hashes_wo_scheduler.lmdb'),
        indexes=MerkleDatabase.create_index_configuration(),
        _size=10 * 1024 * 1024)
    tree = MerkleDatabase(database=database)
    hashes = []
    accumulated = {}
    for batch in self._batches:
        outcome = self._batch_results[batch.header_signature]
        if outcome.is_valid:
            for txn in batch.transactions:
                _, address_values, deletes = self._txn_execution[
                    txn.header_signature]
                combined = {}
                for pair in address_values:
                    combined.update({addr: pair[addr] for addr in pair})
                # Execution is entirely serial, so a later write to an
                # address deliberately replaces any earlier one.
                accumulated.update(combined)
                # a delete drops any pending write for that address
                for address in deletes:
                    accumulated.pop(address, None)
        # yaml files may carry an explicit state root for a batch
        if outcome.state_hash is not None:
            root = tree.update(set_items=accumulated, virtual=False)
            tree.set_merkle_root(merkle_root=root)
            hashes.append(root)
    if not hashes:
        # no explicit roots in the yaml: emit one virtual root
        hashes.append(tree.update(set_items=accumulated))
    return hashes
def compute_state_hashes_wo_scheduler(self, base_dir):
    """Creates a state hash from the state updates from each txn in a
    valid batch.

    Args:
        base_dir (str): directory in which the backing LMDB database
            file is created.

    Returns state_hashes (list of str): The merkle roots from state
        changes in 1 or more blocks in the yaml file.
    """
    database = NativeLmdbDatabase(
        os.path.join(base_dir, 'compute_state_hashes_wo_scheduler.lmdb'),
        indexes=MerkleDatabase.create_index_configuration(),
        _size=10 * 1024 * 1024)
    tree = MerkleDatabase(database=database)
    state_hashes = []
    updates = {}
    for batch in self._batches:
        b_id = batch.header_signature
        result = self._batch_results[b_id]
        if result.is_valid:
            for txn in batch.transactions:
                txn_id = txn.header_signature
                _, address_values, deletes = self._txn_execution[txn_id]
                batch_updates = {}
                for pair in address_values:
                    batch_updates.update({a: pair[a] for a in pair.keys()})
                # since this is entirely serial, any overwrite
                # of an address is expected and desirable.
                updates.update(batch_updates)
                # a delete removes any pending write for that address
                for address in deletes:
                    if address in updates:
                        del updates[address]
        # This handles yaml files that have state roots in them
        if result.state_hash is not None:
            s_h = tree.update(set_items=updates, virtual=False)
            tree.set_merkle_root(merkle_root=s_h)
            state_hashes.append(s_h)
    if not state_hashes:
        # no explicit roots in the yaml: return one virtual root for
        # everything accumulated
        state_hashes.append(tree.update(set_items=updates))
    return state_hashes
class TestSawtoothMerkleTrie:
    """Context-manager test harness around a MerkleDatabase backed by
    an LMDB file.

    Accessors hash plain keys by default; pass ishash=True when a key
    is already an address hash.
    """

    def __init__(self):
        # Use a unique temp directory instead of the previous
        # hard-coded '/tmp/sawtooth' (left next to a commented-out
        # mkdtemp call), so concurrent test runs cannot collide on
        # the same LMDB file.
        import tempfile
        self.dir = tempfile.mkdtemp(prefix='sawtooth-')
        self.file = os.path.join(self.dir, 'merkle.lmdb')

        self.lmdb = lmdb_nolock_database.LMDBNoLockDatabase(
            self.file, 'n')
        self.trie = MerkleDatabase(self.lmdb)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # always release the LMDB handle, even when the test failed
        self.trie.close()

    # assertions

    def assert_value_at_address(self, address, value, ishash=False):
        assert self.get(address, ishash) == value

    def assert_no_key(self, key):
        with pytest.raises(KeyError):
            self.get(key)

    def assert_root(self, expected):
        assert expected == self.get_merkle_root()

    def assert_not_root(self, *not_roots):
        root = self.get_merkle_root()
        for not_root in not_roots:
            assert root != not_root

    # trie accessors

    # For convenience, assume keys are not hashed
    # unless otherwise indicated.

    def set(self, key, val, ishash=False):
        key_ = key if ishash else _hash(key)
        return self.trie.set(key_, val)

    def get(self, key, ishash=False):
        key_ = key if ishash else _hash(key)
        return self.trie.get(key_)

    def delete(self, key, ishash=False):
        key_ = key if ishash else _hash(key)
        return self.trie.delete(key_)

    def set_merkle_root(self, root):
        self.trie.set_merkle_root(root)

    def get_merkle_root(self):
        return self.trie.get_merkle_root()

    def update(self, set_items, delete_items=None, virtual=True):
        return self.trie.update(set_items, delete_items, virtual=virtual)
def test_squash(self): """Tests that squashing a context based on state from other contexts will result in the same merkle hash as updating the merkle tree with the same data. Notes: Set up the context Test: 1) Make set calls on several of the addresses. 2) Squash the context to get a new state hash. 3) Apply all of the aggregate sets from all of the contexts, to another database with a merkle tree. 4) Assert that the state hashes are the same. 5) Assert that the state deltas have been stored """ # 1) context_id = self._setup_context() self.context_manager.set(context_id, [{ self._create_address(a): v } for a, v in [('yyyy', b'2'), ('tttt', b'4')]]) # 2) squash = self.context_manager.get_squash_handler() resulting_state_hash = squash(self.first_state_hash, [context_id], persist=True, clean_up=True) # 3) final_state_to_update = { self._create_address(a): v for a, v in [('llaa', b'1'), ('aall', b'2'), ( 'nnnn', b'3'), ('zzzz', b'9'), ('yyyy', b'2'), ( 'tttt', b'4'), ('qqqq', b'13'), ('oooo', b'25'), ( 'oozz', b'26'), ('zzoo', b'27'), ('ppoo', b'28'), ('aeio', b'29')] } test_merkle_tree = MerkleDatabase(self.database_results) test_resulting_state_hash = test_merkle_tree.update( final_state_to_update, virtual=False) # 4) self.assertEqual(resulting_state_hash, test_resulting_state_hash) state_changes = self.state_delta_store.get_state_deltas( resulting_state_hash) # 5) for addr, value in final_state_to_update.items(): expected_state_change = StateChange(address=addr, value=value, type=StateChange.SET) self.assertTrue(expected_state_change in state_changes)
def commit_context(self, context_id_list, virtual):
    """ Only used in a test ---
    Commits the state from the contexts referred to in context_id_list
    to the merkle tree.

    Args:
        context_id_list (list of str): The context ids with state to
            commit to the merkle tree.
        virtual (bool): True if the data in contexts shouldn't be
            written to the merkle tree, but just return a merkle root.

    Returns:
        state_hash (str): the new state hash after the context_id_list
            has been committed

    Raises:
        CommitException: if a context id is unknown, the contexts do
            not share one merkle root, or an address is duplicated.
    """
    # generator expressions: no point materializing lists for any/all
    if any(c_id not in self._contexts for c_id in context_id_list):
        raise CommitException("Context Id not in contexts")
    first_id = context_id_list[0]

    if not all(self._contexts[first_id].merkle_root ==
               self._contexts[c_id].merkle_root
               for c_id in context_id_list):
        raise CommitException(
            "MerkleRoots not all equal, yet asking to merge")

    merkle_root = self._contexts[first_id].merkle_root
    tree = MerkleDatabase(self._database, merkle_root=merkle_root)
    updates = {}
    for c_id in context_id_list:
        context = self._contexts[c_id]
        # fetch once: get_state() was previously called twice per
        # context (duplicate check, then values)
        context_state = context.get_state()
        for add in context_state.keys():
            if add in updates:
                raise CommitException(
                    "Duplicate address {} in context {}".format(
                        add, c_id))
        # only futures that resolved to a real value are committed
        effective_updates = {}
        for k, val_fut in context_state.items():
            value = val_fut.result()
            if value is not None:
                effective_updates[k] = value
        updates.update(effective_updates)

    state_hash = tree.update(updates, virtual=False)
    # clean up all contexts that are involved in being squashed.
    base_c_ids = []
    for c_id in context_id_list:
        base_c_ids += self._contexts[c_id].base_context_ids
    all_context_ids = base_c_ids + context_id_list
    self.delete_context(all_context_ids)
    return state_hash
def _squash(state_root, context_ids, persist, clean_up):
    """Merge the writes and deletes of a chain of contexts into the
    merkle tree rooted at state_root.

    Args:
        state_root (str): merkle root the contexts are based on.
        context_ids (list of str): contexts to squash; their base
            contexts are followed transitively.
        persist (bool): if True, write the resulting root to the
            database.
        clean_up (bool): if True, delete every context visited.

    Returns:
        str: the new state hash, or state_root when nothing changed.
    """
    contexts_in_chain = deque()
    contexts_in_chain.extend(context_ids)
    context_ids_already_searched = []
    context_ids_already_searched.extend(context_ids)

    # There is only one exit condition and that is when all the
    # contexts have been accessed once.
    updates = {}
    deletes = set()
    while contexts_in_chain:
        current_c_id = contexts_in_chain.popleft()
        current_context = self._contexts[current_c_id]
        if not current_context.is_read_only():
            current_context.make_read_only()

        addresses_w_values = current_context.get_all_if_set()
        for add, val in addresses_w_values.items():
            # Since we are moving backwards through the graph of
            # contexts, only update if the address hasn't been set
            # or deleted
            if add not in updates and add not in deletes:
                updates[add] = val

        # Only the addresses matter here; iterate the keys directly
        # instead of .items() with a discarded value.
        for add in current_context.get_all_if_deleted():
            # Since we are moving backwards through the graph of
            # contexts, only add to deletes if the address hasn't been
            # previously deleted or set in the graph
            if add not in updates and add not in deletes:
                deletes.add(add)

        for c_id in current_context.base_contexts:
            if c_id not in context_ids_already_searched:
                contexts_in_chain.append(c_id)
                context_ids_already_searched.append(c_id)

    tree = MerkleDatabase(self._database, state_root)

    # filter the delete list to just those items in the tree
    deletes = [addr for addr in deletes if addr in tree]

    if not updates and not deletes:
        state_hash = state_root
    else:
        virtual = not persist
        state_hash = tree.update(updates, deletes, virtual=virtual)

    if clean_up:
        self.delete_contexts(context_ids_already_searched)
    return state_hash
def setUp(self):
    """Build a dict-backed merkle tree pre-populated with three
    settings entries and remember the resulting root hash.
    """
    database = DictDatabase()
    state_view_factory = StateViewFactory(database)
    self._config_view_factory = ConfigViewFactory(state_view_factory)

    initial_settings = {
        TestConfigView._address('my.setting'):
            TestConfigView._setting_entry('my.setting', '10'),
        TestConfigView._address('my.setting.list'):
            TestConfigView._setting_entry('my.setting.list', '10,11,12'),
        TestConfigView._address('my.other.list'):
            TestConfigView._setting_entry('my.other.list', '13;14;15'),
    }
    merkle_db = MerkleDatabase(database)
    # virtual=False persists the nodes so views can read them later
    self._current_root_hash = merkle_db.update(
        initial_settings, virtual=False)
def setUp(self):
    """Build a dict-backed merkle tree pre-populated with three
    settings entries and remember the resulting root hash.
    """
    database = DictDatabase()
    state_view_factory = StateViewFactory(database)
    self._settings_view_factory = SettingsViewFactory(state_view_factory)

    entries = {
        TestSettingsView._address('my.setting'):
            TestSettingsView._setting_entry('my.setting', '10'),
        TestSettingsView._address('my.setting.list'):
            TestSettingsView._setting_entry('my.setting.list', '10,11,12'),
        TestSettingsView._address('my.other.list'):
            TestSettingsView._setting_entry('my.other.list', '13;14;15'),
    }
    merkle_db = MerkleDatabase(database)
    # virtual=False persists the nodes so views can read them later
    self._current_root_hash = merkle_db.update(entries, virtual=False)
def test_squash(self):
    """Tests that squashing a context based on state from other
    contexts will result in the same merkle hash as updating the
    merkle tree with the same data.

    Notes:
        Set up the context

        Test:
            1) Make set calls on several of the addresses.
            2) Squash the context to get a new state hash.
            3) Apply all of the aggregate sets from all of the
               contexts, to another database with a merkle tree.
            4) Assert that the state hashes are the same.
            5) Assert that the state deltas have been stored
    """
    # 1) write two addresses into a freshly prepared context
    context_id = self._setup_context()
    self.context_manager.set(
        context_id, [{'bbbb': b'2'}, {'eeee': b'4'}])

    # 2) squash and persist
    squash = self.context_manager.get_squash_handler()
    resulting_state_hash = squash(
        self.first_state_hash, [context_id], persist=True)

    # 3) replay the aggregate writes on an independent merkle tree
    final_state_to_update = {'aaaa': b'25',
                             'bbbb': b'2',
                             'cccc': b'27',
                             'dddd': b'28',
                             'eeee': b'4'}
    reference_tree = MerkleDatabase(self.database_results)
    expected_hash = reference_tree.update(
        final_state_to_update, virtual=False)

    # 4) both roots must agree
    self.assertEqual(resulting_state_hash, expected_hash)

    state_changes = self.state_delta_store.get_state_deltas(
        resulting_state_hash)

    # 5) every write must appear among the stored deltas as a SET
    for addr, value in final_state_to_update.items():
        expected_state_change = StateChange(
            address=addr, value=value, type=StateChange.SET)
        self.assertTrue(expected_state_change in state_changes)
def _recompute_state_hash(state_root, context=None):
    # for DAG only - recompute state
    #
    # Applies context['updates'] / context['deletes'] virtually on top
    # of state_root and returns the resulting hash, or None when the
    # root cannot be loaded or updated.
    state_hash = None
    try:
        tree = MerkleDatabase(self._database, state_root)
        state_hash = tree.update(context['updates'], context['deletes'],
                                 virtual=True)
        LOGGER.debug('_recompute_state_hash: STATE=%s->%s\n',
                     state_root[:8], state_hash[:8])
    # narrowed from a bare `except:`, which would also swallow
    # SystemExit/KeyboardInterrupt
    except Exception:
        LOGGER.debug('_recompute_state_hash: BAD STATE=%s\n',
                     state_root[:8])
    return state_hash
def _squash(state_root, context_ids):
    """Merge the writes of the given contexts and persist them to the
    merkle tree rooted at state_root.

    Returns:
        str: the new merkle root.

    Raises:
        SquashException: if two contexts wrote the same address.
    """
    tree = MerkleDatabase(self._database, state_root)
    updates = {}
    for c_id in context_ids:
        # take the lock only long enough to look up the context
        with self._shared_lock:
            context = self._contexts[c_id]
        # fetch once: get_address_value_dict() was previously called
        # twice per context (duplicate check, then values)
        address_value_dict = context.get_address_value_dict()
        for add in address_value_dict.keys():
            if add in updates:
                raise SquashException(
                    "Duplicate address {} in context {}".format(
                        add, c_id))
        # each value is a future; .result() blocks until it resolves
        updates.update({k: v.result()
                        for k, v in address_value_dict.items()})

    state_hash = tree.update(updates, virtual=False)
    return state_hash
def commit_context(self, context_id_list, virtual):
    """ Part of the interface to the Executor

    Args:
        context_id_list (list of str): contexts whose writable state
            is merged and applied to the shared merkle root.
        virtual (bool): if True, compute the root without writing the
            nodes to the database.

    Returns:
        state_hash (str): the new state hash after the context_id_list
            has been committed

    Raises:
        CommitException: if a context id is unknown, the contexts do
            not share one merkle root, or an address is duplicated.
    """
    # generator expressions: no point materializing lists for any/all
    if any(c_id not in self._contexts for c_id in context_id_list):
        raise CommitException("Context Id not in contexts")
    first_id = context_id_list[0]
    if not all(self._contexts[first_id].merkle_root ==
               self._contexts[c_id].merkle_root
               for c_id in context_id_list):
        raise CommitException(
            "MerkleRoots not all equal, yet asking to merge")
    merkle_root = self._contexts[first_id].merkle_root
    tree = MerkleDatabase(self._database, merkle_root)
    merged_updates = {}
    for c_id in context_id_list:
        with self._shared_lock:
            context = self._contexts[c_id]
            del self._contexts[c_id]
        # fetch once: get_writable_address_value_dict() was previously
        # called twice per context
        writable = context.get_writable_address_value_dict()
        for k in writable.keys():
            if k in merged_updates:
                raise CommitException(
                    "Duplicate address {} in context {}".format(k, c_id))
        merged_updates.update(writable)

    # resolve the futures; None results mean "no value" and are skipped
    add_value_dict = {}
    for k, val_fut in merged_updates.items():
        value = val_fut.result()
        if value is not None:
            add_value_dict[k] = value
    # (removed the dead `new_root = merkle_root` assignment; the
    # update below always runs)
    new_root = tree.update(set_items=add_value_dict, virtual=virtual)
    return new_root
def _squash(state_root, context_ids):
    """Merge the writes of the given contexts and persist them to the
    merkle tree rooted at state_root.

    Args:
        state_root (str): merkle root the contexts are based on.
        context_ids (list of str): ids of the contexts whose writes
            are merged.

    Returns:
        str: the new merkle root after applying all writes.

    Raises:
        SquashException: if two contexts wrote the same address.
    """
    tree = MerkleDatabase(self._database, state_root)
    updates = dict()
    for c_id in context_ids:
        # take the lock only long enough to look up the context
        with self._shared_lock:
            context = self._contexts[c_id]
        for add in context.get_address_value_dict().keys():
            if add in updates:
                raise SquashException(
                    "Duplicate address {} in context {}".format(
                        add, c_id))
        # each value is a future; .result() blocks until it resolves
        updates.update({
            k: v.result()
            for k, v in context.get_address_value_dict().items()
        })

    # virtual=False: the new nodes are written to the database
    state_hash = tree.update(updates, virtual=False)
    return state_hash
def setUp(self):
    """Build an LMDB-backed merkle tree pre-populated with three
    settings entries and remember the resulting root hash.
    """
    self._temp_dir = tempfile.mkdtemp()
    database = NativeLmdbDatabase(
        os.path.join(self._temp_dir, 'test_config_view.lmdb'),
        _size=10 * 1024 * 1024)
    state_view_factory = StateViewFactory(database)
    self._settings_view_factory = SettingsViewFactory(state_view_factory)

    entries = {
        TestSettingsView._address('my.setting'):
            TestSettingsView._setting_entry('my.setting', '10'),
        TestSettingsView._address('my.setting.list'):
            TestSettingsView._setting_entry('my.setting.list', '10,11,12'),
        TestSettingsView._address('my.other.list'):
            TestSettingsView._setting_entry('my.other.list', '13;14;15'),
    }
    merkle_db = MerkleDatabase(database)
    # virtual=False persists the nodes so views can read them later
    self._current_root_hash = merkle_db.update(entries, virtual=False)
def _squash(state_root, context_ids, persist, clean_up):
    """Merge the writes of a chain of contexts into the merkle tree
    rooted at state_root.

    Args:
        state_root (str): merkle root the contexts are based on.
        context_ids (list of str): contexts to squash; their base
            contexts are followed transitively.
        persist (bool): if True, write the new root to the database
            and record state deltas.
        clean_up (bool): if True, delete every context visited.

    Returns:
        str: the new state hash, or state_root when nothing changed.
    """
    contexts_in_chain = deque()
    contexts_in_chain.extend(context_ids)
    context_ids_already_searched = []
    context_ids_already_searched.extend(context_ids)

    # There is only one exit condition and that is when all the
    # contexts have been accessed once.
    updates = {}
    # idiomatic truthiness tests below, consistent with the newer
    # revision of this function elsewhere in the project
    while contexts_in_chain:
        current_c_id = contexts_in_chain.popleft()
        current_context = self._contexts[current_c_id]
        if not current_context.is_read_only():
            current_context.make_read_only()

        addresses_w_values = current_context.get_all_if_set()
        for add, val in addresses_w_values.items():
            # Since we are moving backwards through the graph of
            # contexts, only update if the address hasn't been set
            if add not in updates:
                updates[add] = val

        for c_id in current_context.base_contexts:
            if c_id not in context_ids_already_searched:
                contexts_in_chain.append(c_id)
                context_ids_already_searched.append(c_id)

    if not updates:
        # No writes in the chain: the root is unchanged, but fall
        # through so clean_up still runs (the previous early return
        # leaked every visited context).
        state_hash = state_root
    else:
        tree = MerkleDatabase(self._database, state_root)
        virtual = not persist
        state_hash = tree.update(updates, virtual=virtual)
        if persist:
            # save the state changes to the state_delta_store
            changes = [
                StateChange(address=addr, value=value,
                            type=StateChange.SET)
                for addr, value in updates.items()
            ]
            self._state_delta_store.save_state_deltas(
                state_hash, changes)

    if clean_up:
        self.delete_contexts(context_ids_already_searched)
    return state_hash
def setUp(self):
    """Create an LMDB-backed merkle tree pre-populated with three
    settings entries and remember its root hash for the tests.
    """
    self._temp_dir = tempfile.mkdtemp()

    database = NativeLmdbDatabase(
        os.path.join(self._temp_dir, 'test_config_view.lmdb'),
        indexes=MerkleDatabase.create_index_configuration(),
        _size=10 * 1024 * 1024)
    state_view_factory = StateViewFactory(database)
    self._settings_view_factory = SettingsViewFactory(state_view_factory)

    merkle_db = MerkleDatabase(database)
    # virtual=False persists the nodes so views created from this
    # root can read them later
    self._current_root_hash = merkle_db.update({
        TestSettingsView._address('my.setting'):
            TestSettingsView._setting_entry('my.setting', '10'),
        TestSettingsView._address('my.setting.list'):
            TestSettingsView._setting_entry('my.setting.list', '10,11,12'),
        TestSettingsView._address('my.other.list'):
            TestSettingsView._setting_entry('my.other.list', '13;14;15')
    }, virtual=False)
def _squash(state_root, context_ids, persist, clean_up):
    """Merge the writes of a chain of contexts into the merkle tree
    rooted at state_root.

    Args:
        state_root (str): merkle root the contexts are based on.
        context_ids (list of str): contexts to squash; their base
            contexts are followed transitively.
        persist (bool): if True, write the new root to the database
            and record state deltas.
        clean_up (bool): if True, delete every context visited.

    Returns:
        str: the new state hash, or state_root when nothing changed.
    """
    contexts_in_chain = deque()
    contexts_in_chain.extend(context_ids)
    context_ids_already_searched = []
    context_ids_already_searched.extend(context_ids)

    # There is only one exit condition and that is when all the
    # contexts have been accessed once.
    updates = {}
    while contexts_in_chain:
        current_c_id = contexts_in_chain.popleft()
        current_context = self._contexts[current_c_id]
        if not current_context.is_read_only():
            current_context.make_read_only()

        addresses_w_values = current_context.get_all_if_set()
        for add, val in addresses_w_values.items():
            # Since we are moving backwards through the graph of
            # contexts, only update if the address hasn't been set
            if add not in updates:
                updates[add] = val

        for c_id in current_context.base_contexts:
            if c_id not in context_ids_already_searched:
                contexts_in_chain.append(c_id)
                context_ids_already_searched.append(c_id)

    if not updates:
        # No writes in the chain: the root is unchanged, but fall
        # through so clean_up still runs (the previous early return
        # leaked every visited context; the later deletes-aware
        # revision of this function uses this structure).
        state_hash = state_root
    else:
        tree = MerkleDatabase(self._database, state_root)
        virtual = not persist
        state_hash = tree.update(updates, virtual=virtual)
        if persist:
            # save the state changes to the state_delta_store
            changes = [StateChange(address=addr, value=value,
                                   type=StateChange.SET)
                       for addr, value in updates.items()]
            self._state_delta_store.save_state_deltas(
                state_hash, changes)

    if clean_up:
        self.delete_contexts(context_ids_already_searched)
    return state_hash
def commit_context(self, context_id_list, virtual):
    """ Part of the interface to the Executor

    Args:
        context_id_list (list of str): contexts whose writable state
            is merged and applied to the shared merkle root.
        virtual (bool): if True, compute the root without writing the
            nodes to the database.

    Returns:
        state_hash (str): the new state hash after the context_id_list
            has been committed

    Raises:
        CommitException: if a context id is unknown, the contexts do
            not share one merkle root, or an address is duplicated.
    """
    # generator expressions: no point materializing lists for any/all
    if any(c_id not in self._contexts for c_id in context_id_list):
        raise CommitException("Context Id not in contexts")
    first_id = context_id_list[0]
    if not all(self._contexts[first_id].merkle_root ==
               self._contexts[c_id].merkle_root
               for c_id in context_id_list):
        raise CommitException(
            "MerkleRoots not all equal, yet asking to merge")
    merkle_root = self._contexts[first_id].merkle_root
    tree = MerkleDatabase(self._database, merkle_root)
    merged_updates = {}
    for c_id in context_id_list:
        with self._shared_lock:
            context = self._contexts[c_id]
            del self._contexts[c_id]
        # fetch once: get_writable_address_value_dict() was previously
        # called twice per context
        writable = context.get_writable_address_value_dict()
        for k in writable.keys():
            if k in merged_updates:
                raise CommitException(
                    "Duplicate address {} in context {}".format(k, c_id))
        merged_updates.update(writable)

    # NOTE(review): unlike _squash, unresolved/None future results are
    # NOT filtered out here — confirm that is intended.
    add_value_dict = {address: value.result()
                      for address, value in merged_updates.items()}
    # (removed the dead `new_root = merkle_root` assignment; the
    # update below always runs)
    new_root = tree.update(set_items=add_value_dict, virtual=virtual)
    return new_root
def make_db_and_store(base_dir, size=3):
    """Creates and returns three related objects for testing:
        * database - dict database with evolving state
        * store - blocks with root hashes corresponding to that state
        * roots - list of root hashes used in order

    With defaults, the values at the three roots look like this:
        * 0 - {'000...1': b'1'}
        * 1 - {'000...1': b'2', '000...2': b'4'}
        * 2 - {'000...1': b'3', '000...2': b'5', '000...3': b'7'}
        * 3 - {'000...1': b'4', '000...2': b'6', '000...3': b'8',
               '000...4': b'10'}
    """
    database = NativeLmdbDatabase(
        os.path.join(base_dir, 'client_handlers_mock_db.lmdb'),
        indexes=MerkleDatabase.create_index_configuration(),
        _size=10 * 1024 * 1024)
    store = MockBlockStore(size=0)
    roots = []

    merkle = MerkleDatabase(database)

    # Create all the keys that will be used. Keys are zero-padded hex
    # strings starting with '1'.
    keys = [format(i, 'x').zfill(70) for i in range(1, size + 1)]

    for i in range(1, size + 1):
        # Construct the state for this root from scratch (a dead
        # `data = {}` before the loop was removed; it was always
        # overwritten here)
        data = {}
        for key_idx in range(i):
            key = keys[key_idx]
            # Calculate unique values based on the key and root
            val = i + (2 * key_idx)
            data[key] = str(val).encode()

        root = merkle.update(data, virtual=False)
        roots.append(root)
        store.add_block(str(i), root)

    return database, store, roots
def make_db_and_store(base_dir, size=3):
    """Creates and returns three related objects for testing:
        * database - dict database with evolving state
        * store - blocks with root hashes corresponding to that state
        * roots - list of root hashes used in order

    With defaults, the values at the three roots look like this:
        * 0 - {'000...1': b'1'}
        * 1 - {'000...1': b'2', '000...2': b'4'}
        * 2 - {'000...1': b'3', '000...2': b'5', '000...3': b'7'}
        * 3 - {'000...1': b'4', '000...2': b'6', '000...3': b'8',
               '000...4': b'10'}
    """
    database = NativeLmdbDatabase(
        os.path.join(base_dir, 'client_handlers_mock_db.lmdb'),
        indexes=MerkleDatabase.create_index_configuration(),
        _size=10 * 1024 * 1024)
    store = MockBlockStore(size=0)
    roots = []

    merkle = MerkleDatabase(database)

    # Create all the keys that will be used. Keys are zero-padded hex
    # strings starting with '1'.
    keys = [format(i, 'x').zfill(70) for i in range(1, size + 1)]

    for i in range(1, size + 1):
        # Construct the state for this root from scratch (a dead
        # `data = {}` before the loop was removed; it was always
        # overwritten here)
        data = {}
        for key_idx in range(i):
            key = keys[key_idx]
            # Calculate unique values based on the key and root
            val = i + (2 * key_idx)
            data[key] = str(val).encode()

        root = merkle.update(data, virtual=False)
        roots.append(root)
        store.add_block(str(i), root)

    return database, store, roots
def test_state_view(self):
    """Tests the StateViewFactory and its creation of StateViews

    This test exercises the following:

    1. Create an empty merkle database.
    2. Create a view into the database, asserting its emptiness.
    3. Update the database with a value, creating a new root.
    4. Create a view into the database with the new root.
    5. Verify the view does not match the previous view and contains
       the new item.
    """
    merkle_db = MerkleDatabase(self.database)
    view_factory = StateViewFactory(self.database)

    empty_view = view_factory.create_view(merkle_db.get_merkle_root())

    # a view over the empty tree has no addresses and no leaves
    self.assertEqual([], empty_view.addresses())
    self.assertEqual({}, dict(empty_view.leaves('')))
    with self.assertRaises(KeyError):
        empty_view.get('abcd')

    next_root = merkle_db.update({'abcd': 'hello'.encode()},
                                 virtual=False)
    updated_view = view_factory.create_view(next_root)

    # the original view is unaffected by the update
    self.assertEqual([], empty_view.addresses())
    self.assertEqual(['abcd'], updated_view.addresses())

    # the new value reads back through the new view
    self.assertEqual('hello', updated_view.get('abcd').decode())
    self.assertEqual({'abcd': 'hello'.encode()},
                     dict(updated_view.leaves('')))
def _squash(state_root, context_ids): tree = MerkleDatabase(self._database, state_root) updates = dict() for c_id in context_ids: with self._shared_lock: context = self._contexts[c_id] for add in context.get_address_value_dict().keys(): if add in updates: raise SquashException( "Duplicate address {} in context {}".format( add, c_id)) effective_updates = {} for k, val_fut in context.get_address_value_dict().items(): value = val_fut.result() if value is not None: effective_updates[k] = value updates.update(effective_updates) state_hash = tree.update(updates, virtual=False) return state_hash
def test_squash(self): """Tests that squashing a context based on state from other contexts will result in the same merkle hash as updating the merkle tree with the same data. Notes: Set up the context Test: 1) Make set calls on several of the addresses. 2) Squash the context to get a new state hash. 3) Apply all of the aggregate sets from all of the contexts, to another database with a merkle tree. 4) Assert that the state hashes are the same. """ # 1) context_id = self._setup_context() self.context_manager.set(context_id, [{'bbbb': b'2'}, {'eeee': b'4'}]) # 2) squash = self.context_manager.get_squash_handler() resulting_state_hash = squash(self.first_state_hash, [context_id]) # 3) final_state_to_update = { 'aaaa': b'25', 'bbbb': b'2', 'cccc': b'27', 'dddd': b'28', 'eeee': b'4' } test_merkle_tree = MerkleDatabase(self.database_results) test_resulting_state_hash = test_merkle_tree.update( final_state_to_update, virtual=False) # 4) self.assertEqual(resulting_state_hash, test_resulting_state_hash)
def _add_valid_batch_invalid_batch(self, scheduler, context_manager): """Tests the squash function. That the correct state hash is found at the end of valid and invalid batches, similar to block publishing. Basically: 1. Adds two batches, one where all the txns are valid, and one where one of the txns is invalid. 2. Run through the scheduler executor interaction as txns are processed. 3. Verify that the state root obtained through the squash function is the same as directly updating the merkle tree. 4. Verify that correct batch statuses are set This test should work for both a serial and parallel scheduler. """ private_key = signing.generate_privkey() public_key = signing.generate_pubkey(private_key) # 1) batch_signatures = [] for names in [['a', 'b'], ['invalid', 'c'], ['d', 'e']]: batch_txns = [] for name in names: txn, _ = create_transaction(payload=name.encode(), private_key=private_key, public_key=public_key) batch_txns.append(txn) batch = create_batch(transactions=batch_txns, private_key=private_key, public_key=public_key) batch_signatures.append(batch.header_signature) scheduler.add_batch(batch) scheduler.finalize() # 2) sched1 = iter(scheduler) invalid_payload = hashlib.sha512('invalid'.encode()).hexdigest() while not scheduler.complete(block=False): txn_info = next(sched1) txn_header = transaction_pb2.TransactionHeader() txn_header.ParseFromString(txn_info.txn.header) inputs_or_outputs = list(txn_header.inputs) c_id = context_manager.create_context( state_hash=txn_info.state_hash, inputs=inputs_or_outputs, outputs=inputs_or_outputs, base_contexts=txn_info.base_context_ids) if txn_header.payload_sha512 == invalid_payload: scheduler.set_transaction_execution_result( txn_info.txn.header_signature, False, None) else: context_manager.set(c_id, [{inputs_or_outputs[0]: b"1"}]) scheduler.set_transaction_execution_result( txn_info.txn.header_signature, True, c_id) sched2 = iter(scheduler) # 3) txn_info_a = next(sched2) txn_a_header = 
transaction_pb2.TransactionHeader() txn_a_header.ParseFromString(txn_info_a.txn.header) inputs_or_outputs = list(txn_a_header.inputs) address_a = inputs_or_outputs[0] txn_info_b = next(sched2) address_b = _get_address_from_txn(txn_info_b) txn_infoInvalid = next(sched2) txn_info_c = next(sched2) txn_info_d = next(sched2) address_d = _get_address_from_txn(txn_info_d) txn_info_e = next(sched2) address_e = _get_address_from_txn(txn_info_e) merkle_database = MerkleDatabase(dict_database.DictDatabase()) state_root_end = merkle_database.update( { address_a: b"1", address_b: b"1", address_d: b"1", address_e: b"1" }, virtual=False) # 4) batch1_result = scheduler.get_batch_execution_result( batch_signatures[0]) self.assertTrue(batch1_result.is_valid) batch2_result = scheduler.get_batch_execution_result( batch_signatures[1]) self.assertFalse(batch2_result.is_valid) batch3_result = scheduler.get_batch_execution_result( batch_signatures[2]) self.assertTrue(batch3_result.is_valid) self.assertEqual(batch3_result.state_hash, state_root_end)
class TestSawtoothMerkleTrie(unittest.TestCase):
    """Tests for MerkleDatabase on a NativeLmdbDatabase backend.

    Each test gets a fresh LMDB file in a temp directory (removed in
    tearDown). The accessor helpers at the bottom hash keys by default
    so tests can use readable key names.
    """

    def setUp(self):
        self.dir = tempfile.mkdtemp()
        self.file = os.path.join(self.dir, 'merkle.lmdb')

        self.lmdb = NativeLmdbDatabase(
            self.file,
            indexes=MerkleDatabase.create_index_configuration(),
            _size=120 * 1024 * 1024)

        self.trie = MerkleDatabase(self.lmdb)

    def tearDown(self):
        self.trie.close()
        shutil.rmtree(self.dir)

    def test_merkle_trie_root_advance(self):
        """A set is invisible until set_merkle_root advances to the
        root returned by the write."""
        value = {'name': 'foo', 'value': 1}

        orig_root = self.get_merkle_root()
        new_root = self.set('foo', value)

        # still at the original root: the write must not be visible
        self.assert_root(orig_root)
        self.assert_no_key('foo')

        self.set_merkle_root(new_root)

        self.assert_root(new_root)
        self.assert_value_at_address('foo', value)

    def test_merkle_trie_delete(self):
        """Deletes produce a new root; the old root still sees the key."""
        value = {'name': 'bar', 'value': 1}

        new_root = self.set('bar', value)

        self.set_merkle_root(new_root)

        self.assert_root(new_root)
        self.assert_value_at_address('bar', value)

        # deleting an invalid key should raise an error
        with self.assertRaises(KeyError):
            self.delete('barf')

        del_root = self.delete('bar')

        # del_root hasn't been set yet, so address should still have value
        self.assert_root(new_root)
        self.assert_value_at_address('bar', value)

        self.set_merkle_root(del_root)

        self.assert_root(del_root)
        self.assert_no_key('bar')

    def test_merkle_trie_update(self):
        """Bulk update: 1000 keys written, then 50 overwritten and 50
        (disjoint) deleted in one update; virtual and persisted roots
        must agree."""
        init_root = self.get_merkle_root()

        values = {}
        # NOTE(review): random.sample over dict .items() relies on older
        # Python behavior; newer versions require a sequence -- confirm
        # the target interpreter version.
        key_hashes = {
            key: _hash(key)
            for key in (_random_string(10) for _ in range(1000))
        }

        for key, hashed in key_hashes.items():
            value = {key: _random_string(512)}
            new_root = self.set(hashed, value, ishash=True)
            values[hashed] = value
            self.set_merkle_root(new_root)

        self.assert_not_root(init_root)

        for address, value in values.items():
            self.assert_value_at_address(address, value, ishash=True)

        set_items = {
            hashed: {
                key: 5.0
            }
            for key, hashed in random.sample(key_hashes.items(), 50)
        }
        values.update(set_items)

        delete_items = {
            hashed
            for hashed in random.sample(list(key_hashes.values()), 50)
        }

        # make sure there are no sets and deletes of the same key
        delete_items = delete_items - set_items.keys()
        for addr in delete_items:
            del values[addr]

        virtual_root = self.update(set_items, delete_items, virtual=True)

        # virtual root shouldn't match actual contents of tree
        with self.assertRaises(KeyError):
            self.set_merkle_root(virtual_root)

        actual_root = self.update(set_items, delete_items, virtual=False)

        # the virtual root should be the same as the actual root
        self.assertEqual(virtual_root, actual_root)

        # neither should be the root yet
        self.assert_not_root(virtual_root, actual_root)

        self.set_merkle_root(actual_root)
        self.assert_root(actual_root)

        for address, value in values.items():
            self.assert_value_at_address(address, value, ishash=True)

        for address in delete_items:
            with self.assertRaises(KeyError):
                self.get(address, ishash=True)

    def test_merkle_trie_leaf_iteration(self):
        """iter(trie) and trie.leaves(prefix) walk leaves in key order."""
        new_root = self.update(
            {
                "010101": {
                    "my_data": 1
                },
                "010202": {
                    "my_data": 2
                },
                "010303": {
                    "my_data": 3
                }
            }, [],
            virtual=False)

        # iterate over the empty trie
        iterator = iter(self.trie)
        with self.assertRaises(StopIteration):
            next(iterator)

        self.set_merkle_root(new_root)

        # Test complete trie iteration
        self.assertEqual([("010101", {
            "my_data": 1
        }), ("010202", {
            "my_data": 2
        }), ("010303", {
            "my_data": 3
        })], [entry for entry in iter(self.trie)])

        # Test prefixed iteration
        self.assertEqual([("010202", {
            "my_data": 2
        })], [entry for entry in self.trie.leaves('0102')])

    # assertions
    def assert_value_at_address(self, address, value, ishash=False):
        # value stored at address must round-trip exactly
        self.assertEqual(self.get(address, ishash), value, 'Wrong value')

    def assert_no_key(self, key):
        with self.assertRaises(KeyError):
            self.get(key)

    def assert_root(self, expected):
        self.assertEqual(expected, self.get_merkle_root(), 'Wrong root')

    def assert_not_root(self, *not_roots):
        root = self.get_merkle_root()
        for not_root in not_roots:
            self.assertNotEqual(root, not_root, 'Wrong root')

    # trie accessors
    # For convenience, assume keys are not hashed
    # unless otherwise indicated.
    def set(self, key, val, ishash=False):
        key_ = key if ishash else _hash(key)
        return self.trie.set(key_, val)

    def get(self, key, ishash=False):
        key_ = key if ishash else _hash(key)
        return self.trie.get(key_)

    def delete(self, key, ishash=False):
        key_ = key if ishash else _hash(key)
        return self.trie.delete(key_)

    def set_merkle_root(self, root):
        self.trie.set_merkle_root(root)

    def get_merkle_root(self):
        return self.trie.get_merkle_root()

    def update(self, set_items, delete_items=None, virtual=True):
        return self.trie.update(set_items, delete_items, virtual=virtual)
def test_state_root_after_parallel_ctx(self): """Tests that the correct state root is calculated after basing one context off of multiple contexts. i=abcd o=aaaa +>context_1+ | aaaa=1 | | | i=llll | i=bacd | i=bbbb,aaaa o=llll | o=bbbb | o=cccc,llll sh0--->ctx_0-->sh1>|-->context_2-+---->context_n---->sh2 llll=5 | bbbb=2 | cccc=4 | | llll=8 | i=abcd | | o=cccc | +>context_3+ cccc=3 Notes: Test: 1. Create a context, set a value in it and squash it into a new state hash. 2. Create 3 contexts based off of the state root from #1. 3. Set values at addresses to all three contexts. 4. Base another context off of the contexts from #2. 5. Set a value to an address in this context that has already been set to in the non-base context. 6. Squash the contexts producing a state hash and assert that it equals a state hash obtained by manually updating the merkle tree. """ sh0 = self.first_state_hash # 1) squash = self.context_manager.get_squash_handler() ctx_1 = self.context_manager.create_context( state_hash=sh0, base_contexts=[], inputs=[self._create_address('llll')], outputs=[self._create_address('llll')]) self.context_manager.set(context_id=ctx_1, address_value_list=[{ self._create_address('llll'): b'5' }]) sh1 = squash(state_root=sh0, context_ids=[ctx_1], persist=True, clean_up=True) # 2) context_1 = self.context_manager.create_context( state_hash=sh1, base_contexts=[], inputs=[self._create_address('abcd')], outputs=[self._create_address('aaaa')]) context_2 = self.context_manager.create_context( state_hash=sh1, base_contexts=[], inputs=[self._create_address('bacd')], outputs=[self._create_address('bbbb')]) context_3 = self.context_manager.create_context( state_hash=sh1, base_contexts=[], inputs=[self._create_address('abcd')], outputs=[ self._create_address('cccc'), self._create_address('dddd') ]) # 3) self.context_manager.set(context_id=context_1, address_value_list=[{ self._create_address('aaaa'): b'1' }]) self.context_manager.set(context_id=context_2, address_value_list=[{ 
self._create_address('bbbb'): b'2' }]) self.context_manager.set(context_id=context_3, address_value_list=[{ self._create_address('cccc'): b'3' }]) # 4) context_n = self.context_manager.create_context( state_hash=sh1, base_contexts=[context_1, context_2, context_3], inputs=[ self._create_address('bbbb'), self._create_address('aaaa') ], outputs=[ self._create_address('cccc'), self._create_address('llll') ]) # 5) self.context_manager.set(context_id=context_n, address_value_list=[{ self._create_address('cccc'): b'4', self._create_address('llll'): b'8' }]) # 6) cm_state_root = squash(state_root=sh1, context_ids=[context_n], persist=False, clean_up=True) tree = MerkleDatabase(self.database_results) calc_state_root = tree.update({ self._create_address('aaaa'): b'1', self._create_address('bbbb'): b'2', self._create_address('cccc'): b'4', self._create_address('llll'): b'8' }) self.assertEquals(calc_state_root, cm_state_root)
def test_complex_basecontext_squash(self): """Tests complex context basing and squashing. i=qq,dd dd=0 o=dd,pp pp=1 i=cc,aa +->context_3_2a_1+| o=dd,ll | | i=aa,ab +->context_2a| i=aa aa=0 | o=cc,ab | dd=10 | o=aa,ll ll=1 | sh0->context_1-->sh1| ll=11 +->context_3_2a_2+|->sh1 cc=0 | i=cc,aa +->context_3_2b_1+| ab=1 | o=nn,mm | i=nn,ba mm=0 | +->context_2b| o=mm,ba ba=1 | nn=0 | | mm=1 +->context_3_2b_2+| i=nn,oo ab=0 o=ab,oo oo=1 Notes: Test: 1. Create a context off of the first state hash, set addresses in it, and squash that context, getting a new merkle root. 2. Create 2 contexts with the context in # 1 as the base, and for each of these contexts set addresses to values where the outputs for each are disjoint. 3. For each of these 2 contexts create 2 more contexts each having one of the contexts in # 2 as the base context, and set addresses to values. 4. Squash the 4 contexts from #3 and assert the state hash is equal to a manually computed state hash. """ squash = self.context_manager.get_squash_handler() # 1) inputs_1 = [self._create_address('aa'), self._create_address('ab')] outputs_1 = [self._create_address('cc'), self._create_address('ab')] context_1 = self.context_manager.create_context( state_hash=self.first_state_hash, base_contexts=[], inputs=inputs_1, outputs=outputs_1) self.context_manager.set( context_id=context_1, address_value_list=[{ a: v } for a, v in zip(outputs_1, [bytes(i) for i in range(len(outputs_1))])]) sh1 = squash(state_root=self.first_state_hash, context_ids=[context_1], persist=True, clean_up=True) # 2) inputs_2a = [self._create_address('cc'), self._create_address('aa')] outputs_2a = [self._create_address('dd'), self._create_address('ll')] context_2a = self.context_manager.create_context( state_hash=self.first_state_hash, base_contexts=[], inputs=inputs_2a, outputs=outputs_2a) inputs_2b = [self._create_address('cc'), self._create_address('aa')] outputs_2b = [self._create_address('nn'), self._create_address('mm')] context_2b = 
self.context_manager.create_context(state_hash=sh1, base_contexts=[], inputs=inputs_2b, outputs=outputs_2b) self.context_manager.set( context_id=context_2a, address_value_list=[{ a: bytes(v) } for a, v in zip(outputs_2a, range(10, 10 + len(outputs_2a)))]) self.context_manager.set( context_id=context_2b, address_value_list=[{ a: bytes(v) } for a, v in zip(outputs_2b, range(len(outputs_2b)))]) # 3) inputs_3_2a_1 = [ self._create_address('qq'), self._create_address('dd') ] outputs_3_2a_1 = [ self._create_address('dd'), self._create_address('pp') ] context_3_2a_1 = self.context_manager.create_context( state_hash=sh1, base_contexts=[context_2a], inputs=inputs_3_2a_1, outputs=outputs_3_2a_1) inputs_3_2a_2 = [self._create_address('aa')] outputs_3_2a_2 = [ self._create_address('aa'), self._create_address('ll') ] context_3_2a_2 = self.context_manager.create_context( state_hash=sh1, base_contexts=[context_2a], inputs=inputs_3_2a_2, outputs=outputs_3_2a_2) inputs_3_2b_1 = [ self._create_address('nn'), self._create_address('ab') ] outputs_3_2b_1 = [ self._create_address('mm'), self._create_address('ba') ] context_3_2b_1 = self.context_manager.create_context( state_hash=sh1, base_contexts=[context_2b], inputs=inputs_3_2b_1, outputs=outputs_3_2b_1) inputs_3_2b_2 = [ self._create_address('nn'), self._create_address('oo') ] outputs_3_2b_2 = [ self._create_address('ab'), self._create_address('oo') ] context_3_2b_2 = self.context_manager.create_context( state_hash=sh1, base_contexts=[context_2b], inputs=inputs_3_2b_2, outputs=outputs_3_2b_2) self.context_manager.set( context_id=context_3_2a_1, address_value_list=[{ a: bytes(v) } for a, v in zip(outputs_3_2a_1, range(len(outputs_3_2a_1)))]) self.context_manager.set( context_id=context_3_2a_2, address_value_list=[{ a: bytes(v) } for a, v in zip(outputs_3_2a_2, range(len(outputs_3_2a_2)))]) self.context_manager.set( context_id=context_3_2b_1, address_value_list=[{ a: bytes(v) } for a, v in zip(outputs_3_2b_1, 
range(len(outputs_3_2b_1)))]) self.context_manager.set( context_id=context_3_2b_2, address_value_list=[{ a: bytes(v) } for a, v in zip(outputs_3_2b_2, range(len(outputs_3_2b_2)))]) # 4) sh2 = squash(state_root=sh1, context_ids=[ context_3_2a_1, context_3_2a_2, context_3_2b_1, context_3_2b_2 ], persist=False, clean_up=True) tree = MerkleDatabase(self.database_results) state_hash_from_1 = tree.update(set_items={ a: v for a, v in zip(outputs_1, [bytes(i) for i in range(len(outputs_1))]) }, virtual=False) self.assertEquals( state_hash_from_1, sh1, "The manually calculated state hash from the first " "context and the one calculated by squashing that " "state hash should be the same") tree.set_merkle_root(state_hash_from_1) test_sh2 = tree.update( set_items={ self._create_address('aa'): bytes(0), self._create_address('ab'): bytes(0), self._create_address('ba'): bytes(1), self._create_address('dd'): bytes(0), self._create_address('ll'): bytes(1), self._create_address('mm'): bytes(0), self._create_address('oo'): bytes(1), self._create_address('pp'): bytes(1), self._create_address('nn'): bytes(0), self._create_address('cc'): bytes(0) }) self.assertEquals( sh2, test_sh2, "Manually calculated and context " "manager calculated merkle hashes " "are the same")
def _squash(state_root, context_ids, persist, clean_up): contexts_in_chain = deque() contexts_in_chain.extend(context_ids) context_ids_already_searched = [] context_ids_already_searched.extend(context_ids) """ # for testing # check state for testing tree = MerkleDatabase(self._database, state_root) try: tree._get_by_addr("449095bc5d9deba00a635d8db93c9deeb043416204f494b9f07862e9445559f0185109") LOGGER.debug('_SQUASH: ADDRESS YES BEFORE\n') except : LOGGER.debug('_SQUASH: ADDRESS NO BEFORE\n') """ # There is only one exit condition and that is when all the # contexts have been accessed once. #LOGGER.debug('_SQUASH: persist=%s clean_up=%s \n',persist,clean_up) updates = dict() deletes = set() while contexts_in_chain: current_c_id = contexts_in_chain.popleft() current_context = self._contexts[current_c_id] if not current_context.is_read_only(): current_context.make_read_only() addresses_w_values = current_context.get_all_if_set() for add, val in addresses_w_values.items(): # Since we are moving backwards through the graph of # contexts, only update if the address hasn't been set # or deleted if add not in updates and add not in deletes: updates[add] = val addresses_w_values = current_context.get_all_if_deleted() for add, _ in addresses_w_values.items(): # Since we are moving backwards through the graph of # contexts, only add to deletes if the address hasn't been # previously deleted or set in the graph if add not in updates and add not in deletes: deletes.add(add) for c_id in current_context.base_contexts: if c_id not in context_ids_already_searched: contexts_in_chain.append(c_id) context_ids_already_searched.append(c_id) tree = MerkleDatabase(self._database, state_root) # was here """ # check state for testing try: tree._get_by_addr("449095bc5d9deba00a635d8db93c9deeb043416204f494b9f07862e9445559f0185109") LOGGER.debug('_SQUASH: ADDRESS YES STATE=%s\n',state_root[:8] if state_root is not None else None) except : LOGGER.debug('_SQUASH: ADDRESS NO 
STATE=%s\n',state_root[:8] if state_root is not None else None) """ # filter the delete list to just those items in the tree deletes = [addr for addr in deletes if addr in tree] if not updates and not deletes: state_hash = state_root else: virtual = not persist # for compute new state - we can save updates, deletes for recompute it for DAG state_hash = tree.update(updates, deletes, virtual=virtual) #LOGGER.debug('_SQUASH: virtual=%s updates=%s deletes=%s STATE=%s\n',virtual,updates,deletes,state_hash[:8]) if clean_up: self.delete_contexts(context_ids_already_searched) """ # check state for testing try: tree._get_by_addr("449095bc5d9deba00a635d8db93c9deeb043416204f494b9f07862e9445559f0185109") LOGGER.debug('_SQUASH: ADDRESS YES AFTER STATE=%s\n',state_root[:8] if state_root is not None else None) except : LOGGER.debug('_SQUASH: ADDRESS NO AFTER STATE=%s\n',state_root[:8] if state_root is not None else None) """ return (state_hash, updates, deletes) # for DAG
class TestSawtoothMerkleTrie(unittest.TestCase):
    """Tests for MerkleDatabase on an LMDBNoLockDatabase backend.

    Each test gets a fresh LMDB file in a temp directory; tearDown now
    also removes that directory (previously it was leaked on every run).
    """

    def setUp(self):
        self.dir = tempfile.mkdtemp()
        self.file = os.path.join(self.dir, 'merkle.lmdb')

        self.lmdb = lmdb_nolock_database.LMDBNoLockDatabase(self.file, 'n')

        self.trie = MerkleDatabase(self.lmdb)

    def tearDown(self):
        self.trie.close()
        # fix: the mkdtemp() directory was never removed, leaking a temp
        # dir (and the lmdb file) per test run
        shutil.rmtree(self.dir)

    def test_merkle_trie_root_advance(self):
        """A set is invisible until set_merkle_root advances to the
        root returned by the write."""
        value = {'name': 'foo', 'value': 1}

        orig_root = self.get_merkle_root()
        new_root = self.set('foo', value)

        # still at the original root: the write must not be visible
        self.assert_root(orig_root)
        self.assert_no_key('foo')

        self.set_merkle_root(new_root)

        self.assert_root(new_root)
        self.assert_value_at_address('foo', value)

    def test_merkle_trie_delete(self):
        """Deletes produce a new root; the old root still sees the key."""
        value = {'name': 'bar', 'value': 1}

        new_root = self.set('bar', value)

        self.set_merkle_root(new_root)

        self.assert_root(new_root)
        self.assert_value_at_address('bar', value)

        # deleting an invalid key should raise an error
        with self.assertRaises(KeyError):
            self.delete('barf')

        del_root = self.delete('bar')

        # del_root hasn't been set yet, so address should still have value
        self.assert_root(new_root)
        self.assert_value_at_address('bar', value)

        self.set_merkle_root(del_root)

        self.assert_root(del_root)
        self.assert_no_key('bar')

    def test_merkle_trie_update(self):
        """Bulk update: 1000 keys written, then 50 overwritten and 50
        (disjoint) deleted in one update; virtual and persisted roots
        must agree."""
        init_root = self.get_merkle_root()

        values = {}
        key_hashes = {
            key: _hash(key)
            for key in (_random_string(10) for _ in range(1000))
        }

        for key, hashed in key_hashes.items():
            value = {key: _random_string(512)}
            new_root = self.set(hashed, value, ishash=True)
            values[hashed] = value
            self.set_merkle_root(new_root)

        self.assert_not_root(init_root)

        for address, value in values.items():
            self.assert_value_at_address(address, value, ishash=True)

        set_items = {
            hashed: {
                key: 5.0
            }
            for key, hashed in random.sample(key_hashes.items(), 50)
        }
        values.update(set_items)

        delete_items = {
            hashed
            for hashed in random.sample(list(key_hashes.values()), 50)
        }

        # make sure there are no sets and deletes of the same key
        delete_items = delete_items - set_items.keys()
        for addr in delete_items:
            del values[addr]

        virtual_root = self.update(set_items, delete_items, virtual=True)

        # virtual root shouldn't match actual contents of tree
        with self.assertRaises(KeyError):
            self.set_merkle_root(virtual_root)

        actual_root = self.update(set_items, delete_items, virtual=False)

        # the virtual root should be the same as the actual root
        self.assertEqual(virtual_root, actual_root)

        # neither should be the root yet
        self.assert_not_root(virtual_root, actual_root)

        self.set_merkle_root(actual_root)
        self.assert_root(actual_root)

        for address, value in values.items():
            self.assert_value_at_address(address, value, ishash=True)

        for address in delete_items:
            with self.assertRaises(KeyError):
                self.get(address, ishash=True)

    # assertions
    def assert_value_at_address(self, address, value, ishash=False):
        self.assertEqual(self.get(address, ishash), value, 'Wrong value')

    def assert_no_key(self, key):
        with self.assertRaises(KeyError):
            self.get(key)

    def assert_root(self, expected):
        self.assertEqual(expected, self.get_merkle_root(), 'Wrong root')

    def assert_not_root(self, *not_roots):
        root = self.get_merkle_root()
        for not_root in not_roots:
            self.assertNotEqual(root, not_root, 'Wrong root')

    # trie accessors
    # For convenience, assume keys are not hashed
    # unless otherwise indicated.
    def set(self, key, val, ishash=False):
        key_ = key if ishash else _hash(key)
        return self.trie.set(key_, val)

    def get(self, key, ishash=False):
        key_ = key if ishash else _hash(key)
        return self.trie.get(key_)

    def delete(self, key, ishash=False):
        key_ = key if ishash else _hash(key)
        return self.trie.delete(key_)

    def set_merkle_root(self, root):
        self.trie.set_merkle_root(root)

    def get_merkle_root(self):
        return self.trie.get_merkle_root()

    def update(self, set_items, delete_items=None, virtual=True):
        return self.trie.update(set_items, delete_items, virtual=virtual)
class TestSawtoothMerkleTrie(unittest.TestCase):
    """Tests for MerkleDatabase on an LMDBNoLockDatabase backend,
    using explicit MerkleDatabase.hash calls for keys."""

    def setUp(self):
        # NOTE(review): the database path is hard-coded to a vagrant home
        # directory, so state leaks between runs and the test only works
        # in that environment -- consider a tempfile-based path.
        self.lmdb = lmdb_nolock_database.LMDBNoLockDatabase(
            "/home/vagrant/merkle.lmdb", 'n')

        self.trie = MerkleDatabase(self.lmdb)

    def tearDown(self):
        self.trie.close()

    def test_merkle_trie_root_advance(self):
        """A set is invisible until the returned root is advanced to."""
        value = {"name": "foo", "value": 1}

        new_root = self.trie.set(MerkleDatabase.hash("foo"), value)

        # not yet visible at the current root
        with self.assertRaises(KeyError):
            self.trie.get(MerkleDatabase.hash("foo"))

        self.trie.set_merkle_root(new_root)

        self.assertEqual(self.trie.get(MerkleDatabase.hash("foo")), value)

    def test_merkle_trie_delete(self):
        """Deleting a key makes it unreadable at the new root."""
        value = {"name": "bar", "value": 1}

        new_root = self.trie.set(MerkleDatabase.hash("bar"), value)
        self.trie.set_merkle_root(new_root)
        self.assertEqual(self.trie.get(MerkleDatabase.hash("bar")), value)

        del_root = self.trie.delete(MerkleDatabase.hash("bar"))
        self.trie.set_merkle_root(del_root)
        with self.assertRaises(KeyError):
            self.trie.get(MerkleDatabase.hash("bar"))

    def test_merkle_trie_update(self):
        """Writes 1000 random keys one by one, then overwrites 50 of
        them with a single bulk update and reads them back."""
        value = ''.join(random.choice(string.ascii_lowercase)
                        for _ in range(512))

        keys = []
        for _ in range(1000):
            key = ''.join(random.choice(string.ascii_lowercase)
                          for _ in range(10))
            keys.append(key)
            # renamed from 'hash', which shadowed the builtin
            key_hash = MerkleDatabase.hash(key)

            new_root = self.trie.set(key_hash, {key: value})
            self.trie.set_merkle_root(new_root)

        set_items = {}
        for key in random.sample(keys, 50):
            key_hash = MerkleDatabase.hash(key)
            set_items[key_hash] = {key: 5.0}

        update_root = self.trie.update(set_items)
        self.trie.set_merkle_root(update_root)

        for address in set_items:
            self.assertEqual(self.trie.get(address), set_items[address])
class TestIdentityView(unittest.TestCase):
    """Tests that IdentityView reads Roles and Policies back out of a
    MerkleDatabase populated with serialized RoleList/PolicyList protos.
    """

    def __init__(self, test_name):
        super().__init__(test_name)
        self._temp_dir = None

    def setUp(self):
        self._temp_dir = tempfile.mkdtemp()
        # NOTE(review): unlike other fixtures in this codebase, no index
        # configuration is passed to NativeLmdbDatabase here -- confirm
        # that is intentional.
        self._database = NativeLmdbDatabase(
            os.path.join(self._temp_dir, 'test_identity_view.lmdb'),
            _size=10 * 1024 * 1024)
        self._tree = MerkleDatabase(self._database)

    def tearDown(self):
        shutil.rmtree(self._temp_dir)

    def test_identityview_roles(self):
        """Tests get_role and get_roles get the correct Roles and the
        IdentityViewFactory produces the correct view of the database.

        Notes:
            1. Create an empty MerkleDatabase and update it with one
               serialized RoleList.
            2. Assert that get_role returns that named Role.
            3. Assert that get_role returns None for a name that doesn't
               correspond to a Role.
            4. Assert that all the Roles are returned by get_roles.
            5. Update the MerkleDatabase with another serialized RoleList
               with a different name.
            6. Repeat 2.
            7. Repeat 3.
            8. Repeat 4.
        """
        state_view_factory = StateViewFactory(self._database)
        identity_view_factory = identity_view.IdentityViewFactory(
            state_view_factory=state_view_factory)
        # 1.
        role_list = identity_pb2.RoleList()
        role1 = role_list.roles.add()
        role1_name = "sawtooth.test.example1"
        role1.name = role1_name
        role1.policy_name = "this_is_an_example"
        state_root1 = self._tree.update(
            set_items={
                _get_role_address(role1_name): role_list.SerializeToString()
            },
            virtual=False)
        # 2.
        identity_view1 = identity_view_factory.create_identity_view(
            state_hash=state_root1)
        self.assertEqual(
            identity_view1.get_role(role1_name), role1,
            "IdentityView().get_role returns the correct Role by name.")
        # 3.
        self.assertIsNone(
            identity_view1.get_role("Not-a-Role"),
            "IdentityView().get_role returns None if there is "
            "no Role with that name.")
        # 4.
        self.assertEqual(identity_view1.get_roles(), [role1],
                         "IdentityView().get_roles returns all the roles in"
                         " State.")
        # 5. a second RoleList at a different address, based on root1
        role_list2 = identity_pb2.RoleList()
        role2 = role_list2.roles.add()
        role2_name = "sawtooth.test.example2"
        role2.name = role2_name
        role2.policy_name = "this_is_another_example"
        self._tree.set_merkle_root(merkle_root=state_root1)
        state_root2 = self._tree.update(
            {
                _get_role_address(role2_name): role_list2.SerializeToString()
            },
            virtual=False)
        # 6.
        identity_view2 = identity_view_factory.create_identity_view(
            state_hash=state_root2)
        self.assertEqual(
            identity_view2.get_role(role2_name), role2,
            "IdentityView().get_role returns the correct Role by name.")
        # 7.
        self.assertIsNone(
            identity_view2.get_role("not-a-role2"),
            "IdentityView().get_role returns None for names that don't "
            "correspond to a Role.")
        # 8.
        self.assertEqual(
            identity_view2.get_roles(), [role1, role2],
            "IdentityView().get_roles() returns all the Roles in "
            "alphabetical order by name.")

    def test_identityview_policy(self):
        """Tests get_policy and get_policies get the correct Policies and
        the IdentityViewFactory produces the correct view of the database.

        Notes:
            1. Create an empty MerkleDatabase and update it with one
               serialized PolicyList.
            2. Assert that get_policy returns that named Policy.
            3. Assert that get_policy returns None for a name that doesn't
               correspond to a Policy.
            4. Assert that all the Policies are returned by get_policies.
            5. Update the MerkleDatabase with another serialized PolicyList
               with a different name.
            6. Repeat 2.
            7. Repeat 3.
            8. Repeat 4.
        """
        state_view_factory = StateViewFactory(self._database)
        identity_view_factory = identity_view.IdentityViewFactory(
            state_view_factory=state_view_factory)
        # 1.
        policy_list = identity_pb2.PolicyList()
        policy1 = policy_list.policies.add()
        policy1_name = "deny_all_keys"
        policy1.name = policy1_name
        state_root1 = self._tree.update(
            set_items={
                _get_policy_address(policy1_name):
                policy_list.SerializeToString()
            },
            virtual=False)
        # 2.
        identity_view1 = identity_view_factory.create_identity_view(
            state_hash=state_root1)
        self.assertEqual(
            identity_view1.get_policy(policy1_name), policy1,
            "IdentityView().get_policy returns the correct Policy by name.")
        # 3.
        self.assertIsNone(
            identity_view1.get_policy("Not-a-Policy"),
            "IdentityView().get_policy returns None if "
            "there is no Policy with that name.")
        # 4.
        self.assertEqual(identity_view1.get_policies(), [policy1],
                         "IdentityView().get_policies returns all the "
                         "policies in State.")
        # 5. a second PolicyList at a different address, based on root1
        policy_list2 = identity_pb2.PolicyList()
        policy2 = policy_list2.policies.add()
        policy2_name = "accept_all_keys"
        policy2.name = policy2_name
        self._tree.set_merkle_root(merkle_root=state_root1)
        state_root2 = self._tree.update(
            {
                _get_policy_address(policy2_name):
                policy_list2.SerializeToString()
            },
            virtual=False)
        # 6.
        identity_view2 = identity_view_factory.create_identity_view(
            state_hash=state_root2)
        self.assertEqual(
            identity_view2.get_policy(policy2_name), policy2,
            "IdentityView().get_policy returns the correct Policy by name.")
        # 7.
        self.assertIsNone(
            identity_view2.get_policy("not-a-policy2"),
            "IdentityView().get_policy returns None for names that don't "
            "correspond to a Policy.")
        # 8. "accept_all_keys" sorts before "deny_all_keys"
        self.assertEqual(
            identity_view2.get_policies(), [policy2, policy1],
            "IdentityView().get_policies returns all the Policies in "
            "alphabetical order by name.")
def test_complex_basecontext_squash(self):
    """Tests complex context basing and squashing.

    Notes:
        Test:
            1. Create a context off of the first state hash, set
               addresses in it, and squash that context, getting a new
               merkle root.
            2. Create 2 contexts with the context in # 1 as the base,
               and for each of these contexts set addresses to values
               where the outputs for each are disjoint.
            3. For each of these 2 contexts create 2 more contexts each
               having one of the contexts in # 2 as the base context,
               and set addresses to values.
            4. Squash the 4 contexts from # 3 and assert the state hash
               is equal to a manually computed state hash.
    """
    squash = self.context_manager.get_squash_handler()

    # 1) first serial context, squashed and persisted to produce sh1
    inputs_1 = [self._create_address('aa'),
                self._create_address('ab')]
    outputs_1 = [self._create_address('cc'),
                 self._create_address('ab')]
    context_1 = self.context_manager.create_context(
        state_hash=self.first_state_hash,
        base_contexts=[],
        inputs=inputs_1,
        outputs=outputs_1)
    self.context_manager.set(
        context_id=context_1,
        address_value_list=[{a: v} for a, v in zip(
            outputs_1, [bytes(i) for i in range(len(outputs_1))])])
    sh1 = squash(
        state_root=self.first_state_hash,
        context_ids=[context_1],
        persist=True,
        clean_up=True)

    # 2) two contexts with disjoint outputs
    inputs_2a = [self._create_address('cc'),
                 self._create_address('aa')]
    outputs_2a = [self._create_address('dd'),
                  self._create_address('ll')]
    context_2a = self.context_manager.create_context(
        state_hash=self.first_state_hash,
        base_contexts=[],
        inputs=inputs_2a,
        outputs=outputs_2a)

    inputs_2b = [self._create_address('cc'),
                 self._create_address('aa')]
    outputs_2b = [self._create_address('nn'),
                  self._create_address('mm')]
    context_2b = self.context_manager.create_context(
        state_hash=sh1,
        base_contexts=[],
        inputs=inputs_2b,
        outputs=outputs_2b)

    self.context_manager.set(
        context_id=context_2a,
        address_value_list=[{a: bytes(v)} for a, v in zip(
            outputs_2a, range(10, 10 + len(outputs_2a)))])
    self.context_manager.set(
        context_id=context_2b,
        address_value_list=[{a: bytes(v)} for a, v in zip(
            outputs_2b, range(len(outputs_2b)))])

    # 3) two child contexts based on each of the contexts from # 2
    inputs_3_2a_1 = [self._create_address('qq'),
                     self._create_address('dd')]
    outputs_3_2a_1 = [self._create_address('dd'),
                      self._create_address('pp')]
    context_3_2a_1 = self.context_manager.create_context(
        state_hash=sh1,
        base_contexts=[context_2a],
        inputs=inputs_3_2a_1,
        outputs=outputs_3_2a_1)

    inputs_3_2a_2 = [self._create_address('aa')]
    outputs_3_2a_2 = [self._create_address('aa'),
                      self._create_address('ll')]
    context_3_2a_2 = self.context_manager.create_context(
        state_hash=sh1,
        base_contexts=[context_2a],
        inputs=inputs_3_2a_2,
        outputs=outputs_3_2a_2)

    inputs_3_2b_1 = [self._create_address('nn'),
                     self._create_address('ab')]
    outputs_3_2b_1 = [self._create_address('mm'),
                      self._create_address('ba')]
    context_3_2b_1 = self.context_manager.create_context(
        state_hash=sh1,
        base_contexts=[context_2b],
        inputs=inputs_3_2b_1,
        outputs=outputs_3_2b_1)

    inputs_3_2b_2 = [self._create_address('nn'),
                     self._create_address('oo')]
    outputs_3_2b_2 = [self._create_address('ab'),
                      self._create_address('oo')]
    context_3_2b_2 = self.context_manager.create_context(
        state_hash=sh1,
        base_contexts=[context_2b],
        inputs=inputs_3_2b_2,
        outputs=outputs_3_2b_2)

    self.context_manager.set(
        context_id=context_3_2a_1,
        address_value_list=[{a: bytes(v)} for a, v in zip(
            outputs_3_2a_1, range(len(outputs_3_2a_1)))])
    self.context_manager.set(
        context_id=context_3_2a_2,
        address_value_list=[{a: bytes(v)} for a, v in zip(
            outputs_3_2a_2, range(len(outputs_3_2a_2)))])
    self.context_manager.set(
        context_id=context_3_2b_1,
        address_value_list=[{a: bytes(v)} for a, v in zip(
            outputs_3_2b_1, range(len(outputs_3_2b_1)))])
    self.context_manager.set(
        context_id=context_3_2b_2,
        address_value_list=[{a: bytes(v)} for a, v in zip(
            outputs_3_2b_2, range(len(outputs_3_2b_2)))])

    # 4) squash the four leaf contexts and compare against a state
    # hash computed directly on a MerkleDatabase
    sh2 = squash(
        state_root=sh1,
        context_ids=[context_3_2a_1, context_3_2a_2,
                     context_3_2b_1, context_3_2b_2],
        persist=False,
        clean_up=True)

    tree = MerkleDatabase(self.database_results)
    state_hash_from_1 = tree.update(
        set_items={a: v for a, v in zip(
            outputs_1, [bytes(i) for i in range(len(outputs_1))])},
        virtual=False)
    # assertEquals is a deprecated alias removed in Python 3.12;
    # use assertEqual.
    self.assertEqual(state_hash_from_1, sh1,
                     "The manually calculated state hash from the first "
                     "context and the one calculated by squashing that "
                     "state hash should be the same")
    tree.set_merkle_root(state_hash_from_1)
    test_sh2 = tree.update(
        set_items={self._create_address('aa'): bytes(0),
                   self._create_address('ab'): bytes(0),
                   self._create_address('ba'): bytes(1),
                   self._create_address('dd'): bytes(0),
                   self._create_address('ll'): bytes(1),
                   self._create_address('mm'): bytes(0),
                   self._create_address('oo'): bytes(1),
                   self._create_address('pp'): bytes(1),
                   self._create_address('nn'): bytes(0),
                   self._create_address('cc'): bytes(0)})
    self.assertEqual(sh2, test_sh2,
                     "Manually calculated and context "
                     "manager calculated merkle hashes "
                     "are the same")
def test_state_root_after_parallel_ctx(self):
    """Tests that the correct state root is calculated after basing
    one context off of multiple contexts.

    Notes:
        Test:
            1. Create a context, set a value in it and squash it into
               a new state hash.
            2. Create 3 contexts based off of the state root from # 1.
            3. Set values at addresses to all three contexts.
            4. Base another context off of the contexts from # 2.
            5. Set a value to an address in this context that has
               already been set to in the non-base context.
            6. Squash the contexts producing a state hash and assert
               that it equals a state hash obtained by manually
               updating the merkle tree.
    """
    sh0 = self.first_state_hash

    # 1) squash a single context to obtain a starting state root sh1
    squash = self.context_manager.get_squash_handler()
    ctx_1 = self.context_manager.create_context(
        state_hash=sh0,
        base_contexts=[],
        inputs=[self._create_address('llll')],
        outputs=[self._create_address('llll')])
    self.context_manager.set(
        context_id=ctx_1,
        address_value_list=[{self._create_address('llll'): b'5'}])
    sh1 = squash(
        state_root=sh0,
        context_ids=[ctx_1],
        persist=True,
        clean_up=True)

    # 2) three independent contexts based on sh1
    context_1 = self.context_manager.create_context(
        state_hash=sh1,
        base_contexts=[],
        inputs=[self._create_address('abcd')],
        outputs=[self._create_address('aaaa')])
    context_2 = self.context_manager.create_context(
        state_hash=sh1,
        base_contexts=[],
        inputs=[self._create_address('bacd')],
        outputs=[self._create_address('bbbb')])
    context_3 = self.context_manager.create_context(
        state_hash=sh1,
        base_contexts=[],
        inputs=[self._create_address('abcd')],
        outputs=[self._create_address('cccc'),
                 self._create_address('dddd')])

    # 3) set a value in each of the parallel contexts
    self.context_manager.set(
        context_id=context_1,
        address_value_list=[{self._create_address('aaaa'): b'1'}])
    self.context_manager.set(
        context_id=context_2,
        address_value_list=[{self._create_address('bbbb'): b'2'}])
    self.context_manager.set(
        context_id=context_3,
        address_value_list=[{self._create_address('cccc'): b'3'}])

    # 4) a context based on all three parallel contexts
    context_n = self.context_manager.create_context(
        state_hash=sh1,
        base_contexts=[context_1, context_2, context_3],
        inputs=[self._create_address('bbbb'),
                self._create_address('aaaa')],
        outputs=[self._create_address('cccc'),
                 self._create_address('llll')])

    # 5) overwrite 'cccc' (already set in context_3) in the new context
    self.context_manager.set(
        context_id=context_n,
        address_value_list=[{self._create_address('cccc'): b'4',
                             self._create_address('llll'): b'8'}])

    # 6) squashing context_n must match a manual merkle update
    cm_state_root = squash(
        state_root=sh1,
        context_ids=[context_n],
        persist=False,
        clean_up=True)
    tree = MerkleDatabase(self.database_results)
    calc_state_root = tree.update({self._create_address('aaaa'): b'1',
                                   self._create_address('bbbb'): b'2',
                                   self._create_address('cccc'): b'4',
                                   self._create_address('llll'): b'8'})
    # assertEquals is a deprecated alias removed in Python 3.12;
    # use assertEqual.
    self.assertEqual(calc_state_root, cm_state_root)
class TestSawtoothMerkleTrie(unittest.TestCase):
    """Exercises MerkleDatabase root advancement, deletion, and bulk
    update against an LMDB-backed store.
    """

    def setUp(self):
        self.dir = tempfile.mkdtemp()
        self.file = os.path.join(self.dir, 'merkle.lmdb')
        self.lmdb = lmdb_nolock_database.LMDBNoLockDatabase(
            self.file, 'n')
        self.trie = MerkleDatabase(self.lmdb)

    def tearDown(self):
        self.trie.close()
        # Remove the temporary LMDB directory; without this each test
        # run leaks a mkdtemp directory on disk.
        shutil.rmtree(self.dir)

    def test_merkle_trie_root_advance(self):
        """Setting a key returns a new root, but the trie only reflects
        it after set_merkle_root is called.
        """
        value = {'name': 'foo', 'value': 1}

        orig_root = self.get_merkle_root()
        new_root = self.set('foo', value)

        self.assert_root(orig_root)
        self.assert_no_key('foo')

        self.set_merkle_root(new_root)

        self.assert_root(new_root)
        self.assert_value_at_address('foo', value)

    def test_merkle_trie_delete(self):
        """Deleting a key produces a root without it; deleting a
        missing key raises KeyError.
        """
        value = {'name': 'bar', 'value': 1}

        new_root = self.set('bar', value)
        self.set_merkle_root(new_root)

        self.assert_root(new_root)
        self.assert_value_at_address('bar', value)

        # deleting an invalid key should raise an error
        with self.assertRaises(KeyError):
            self.delete('barf')

        del_root = self.delete('bar')

        # del_root hasn't been set yet, so address should still have value
        self.assert_root(new_root)
        self.assert_value_at_address('bar', value)

        self.set_merkle_root(del_root)

        self.assert_root(del_root)
        self.assert_no_key('bar')

    def test_merkle_trie_update(self):
        """Bulk update with sets and deletes: a virtual update produces
        the same root as a persisted one, and deleted keys vanish.
        """
        init_root = self.get_merkle_root()
        values = {}
        key_hashes = {
            key: _hash(key)
            for key in (_random_string(10) for _ in range(1000))
        }

        for key, hashed in key_hashes.items():
            value = {key: _random_string(512)}
            new_root = self.set(hashed, value, ishash=True)
            values[hashed] = value
            self.set_merkle_root(new_root)

        self.assert_not_root(init_root)

        for address, value in values.items():
            self.assert_value_at_address(
                address, value, ishash=True)

        # random.sample requires a sequence on Python 3.11+; a dict
        # items view is neither a list nor a tuple, so materialize it.
        set_items = {
            hashed: {
                key: 5.0
            }
            for key, hashed in random.sample(list(key_hashes.items()), 50)
        }
        values.update(set_items)
        delete_items = {
            hashed
            for hashed in random.sample(list(key_hashes.values()), 50)
        }

        # make sure there are no sets and deletes of the same key
        delete_items = delete_items - set_items.keys()
        for addr in delete_items:
            del values[addr]

        virtual_root = self.update(set_items, delete_items, virtual=True)

        # virtual root shouldn't match actual contents of tree
        with self.assertRaises(KeyError):
            self.set_merkle_root(virtual_root)

        actual_root = self.update(set_items, delete_items, virtual=False)

        # the virtual root should be the same as the actual root
        self.assertEqual(virtual_root, actual_root)

        # neither should be the root yet
        self.assert_not_root(
            virtual_root,
            actual_root)

        self.set_merkle_root(actual_root)
        self.assert_root(actual_root)

        for address, value in values.items():
            self.assert_value_at_address(
                address, value, ishash=True)

        for address in delete_items:
            with self.assertRaises(KeyError):
                self.get(address, ishash=True)

    # assertions

    def assert_value_at_address(self, address, value, ishash=False):
        self.assertEqual(
            self.get(address, ishash),
            value,
            'Wrong value')

    def assert_no_key(self, key):
        with self.assertRaises(KeyError):
            self.get(key)

    def assert_root(self, expected):
        self.assertEqual(
            expected,
            self.get_merkle_root(),
            'Wrong root')

    def assert_not_root(self, *not_roots):
        root = self.get_merkle_root()
        for not_root in not_roots:
            self.assertNotEqual(
                root,
                not_root,
                'Wrong root')

    # trie accessors

    # For convenience, assume keys are not hashed
    # unless otherwise indicated.

    def set(self, key, val, ishash=False):
        key_ = key if ishash else _hash(key)
        return self.trie.set(key_, val)

    def get(self, key, ishash=False):
        key_ = key if ishash else _hash(key)
        return self.trie.get(key_)

    def delete(self, key, ishash=False):
        key_ = key if ishash else _hash(key)
        return self.trie.delete(key_)

    def set_merkle_root(self, root):
        self.trie.set_merkle_root(root)

    def get_merkle_root(self):
        return self.trie.get_merkle_root()

    def update(self, set_items, delete_items=None, virtual=True):
        return self.trie.update(set_items, delete_items, virtual=virtual)
class TestSawtoothMerkleTrie(unittest.TestCase):
    """Exercises MerkleDatabase root advancement, deletion, bulk
    update, and leaf iteration against a NativeLmdbDatabase store.
    """

    def setUp(self):
        self.dir = tempfile.mkdtemp()
        self.file = os.path.join(self.dir, 'merkle.lmdb')

        self.lmdb = NativeLmdbDatabase(
            self.file,
            indexes=MerkleDatabase.create_index_configuration(),
            _size=120 * 1024 * 1024)

        self.trie = MerkleDatabase(self.lmdb)

    def tearDown(self):
        self.trie.close()
        shutil.rmtree(self.dir)

    def test_merkle_trie_root_advance(self):
        """Setting a key returns a new root, but the trie only reflects
        it after set_merkle_root is called.
        """
        value = {'name': 'foo', 'value': 1}

        orig_root = self.get_merkle_root()
        new_root = self.set('foo', value)

        self.assert_root(orig_root)
        self.assert_no_key('foo')

        self.set_merkle_root(new_root)

        self.assert_root(new_root)
        self.assert_value_at_address('foo', value)

    def test_merkle_trie_delete(self):
        """Deleting a key produces a root without it; deleting a
        missing key raises KeyError.
        """
        value = {'name': 'bar', 'value': 1}

        new_root = self.set('bar', value)
        self.set_merkle_root(new_root)

        self.assert_root(new_root)
        self.assert_value_at_address('bar', value)

        # deleting an invalid key should raise an error
        with self.assertRaises(KeyError):
            self.delete('barf')

        del_root = self.delete('bar')

        # del_root hasn't been set yet, so address should still have value
        self.assert_root(new_root)
        self.assert_value_at_address('bar', value)

        self.set_merkle_root(del_root)

        self.assert_root(del_root)
        self.assert_no_key('bar')

    def test_merkle_trie_update(self):
        """Bulk update with sets and deletes: a virtual update produces
        the same root as a persisted one, and deleted keys vanish.
        """
        init_root = self.get_merkle_root()
        values = {}
        key_hashes = {
            key: _hash(key)
            for key in (_random_string(10) for _ in range(1000))
        }

        for key, hashed in key_hashes.items():
            value = {key: _random_string(512)}
            new_root = self.set(hashed, value, ishash=True)
            values[hashed] = value
            self.set_merkle_root(new_root)

        self.assert_not_root(init_root)

        for address, value in values.items():
            self.assert_value_at_address(
                address, value, ishash=True)

        # random.sample requires a sequence on Python 3.11+; a dict
        # items view is neither a list nor a tuple, so materialize it.
        set_items = {
            hashed: {
                key: 5.0
            }
            for key, hashed in random.sample(list(key_hashes.items()), 50)
        }
        values.update(set_items)
        delete_items = {
            hashed
            for hashed in random.sample(list(key_hashes.values()), 50)
        }

        # make sure there are no sets and deletes of the same key
        delete_items = delete_items - set_items.keys()
        for addr in delete_items:
            del values[addr]

        virtual_root = self.update(set_items, delete_items, virtual=True)

        # virtual root shouldn't match actual contents of tree
        with self.assertRaises(KeyError):
            self.set_merkle_root(virtual_root)

        actual_root = self.update(set_items, delete_items, virtual=False)

        # the virtual root should be the same as the actual root
        self.assertEqual(virtual_root, actual_root)

        # neither should be the root yet
        self.assert_not_root(
            virtual_root,
            actual_root)

        self.set_merkle_root(actual_root)
        self.assert_root(actual_root)

        for address, value in values.items():
            self.assert_value_at_address(
                address, value, ishash=True)

        for address in delete_items:
            with self.assertRaises(KeyError):
                self.get(address, ishash=True)

    def test_merkle_trie_leaf_iteration(self):
        """Iterating the trie yields (address, value) pairs in address
        order; leaves() restricts iteration to an address prefix.
        """
        new_root = self.update({
            "010101": {"my_data": 1},
            "010202": {"my_data": 2},
            "010303": {"my_data": 3}
        }, [], virtual=False)

        # iterate over the empty trie
        iterator = iter(self.trie)
        with self.assertRaises(StopIteration):
            next(iterator)

        self.set_merkle_root(new_root)

        # Test complete trie iteration
        self.assertEqual(
            [("010101", {"my_data": 1}),
             ("010202", {"my_data": 2}),
             ("010303", {"my_data": 3})],
            [entry for entry in iter(self.trie)])

        # Test prefixed iteration
        self.assertEqual([("010202", {"my_data": 2})],
                         [entry for entry in self.trie.leaves('0102')])

    # assertions

    def assert_value_at_address(self, address, value, ishash=False):
        self.assertEqual(
            self.get(address, ishash),
            value,
            'Wrong value')

    def assert_no_key(self, key):
        with self.assertRaises(KeyError):
            self.get(key)

    def assert_root(self, expected):
        self.assertEqual(
            expected,
            self.get_merkle_root(),
            'Wrong root')

    def assert_not_root(self, *not_roots):
        root = self.get_merkle_root()
        for not_root in not_roots:
            self.assertNotEqual(
                root,
                not_root,
                'Wrong root')

    # trie accessors

    # For convenience, assume keys are not hashed
    # unless otherwise indicated.

    def set(self, key, val, ishash=False):
        key_ = key if ishash else _hash(key)
        return self.trie.set(key_, val)

    def get(self, key, ishash=False):
        key_ = key if ishash else _hash(key)
        return self.trie.get(key_)

    def delete(self, key, ishash=False):
        key_ = key if ishash else _hash(key)
        return self.trie.delete(key_)

    def set_merkle_root(self, root):
        self.trie.set_merkle_root(root)

    def get_merkle_root(self):
        return self.trie.get_merkle_root()

    def update(self, set_items, delete_items=None, virtual=True):
        return self.trie.update(set_items, delete_items, virtual=virtual)
class TestIdentityView(unittest.TestCase):
    """Verifies that IdentityViewFactory and IdentityView read Roles
    and Policies correctly out of merkle state.
    """

    def __init__(self, test_name):
        super().__init__(test_name)
        self._temp_dir = None

    def setUp(self):
        # Back the merkle tree with a throwaway LMDB file.
        self._temp_dir = tempfile.mkdtemp()
        db_path = os.path.join(self._temp_dir, 'test_identity_view.lmdb')
        self._database = NativeLmdbDatabase(
            db_path,
            indexes=MerkleDatabase.create_index_configuration(),
            _size=10 * 1024 * 1024)
        self._tree = MerkleDatabase(self._database)

    def tearDown(self):
        shutil.rmtree(self._temp_dir)

    def _make_view_factory(self):
        # Every test builds its IdentityViewFactory over the same
        # test database.
        return identity_view.IdentityViewFactory(
            state_view_factory=StateViewFactory(self._database))

    def test_identityview_roles(self):
        """Tests get_role and get_roles get the correct Roles and the
        IdentityViewFactory produces the correct view of the database.

        Notes:
            1. Create an empty MerkleDatabase and update it with one
               serialized RoleList.
            2. Assert that get_role returns that named Role.
            3. Assert that get_role returns None for a name that
               doesn't correspond to a Role.
            4. Assert that all the Roles are returned by get_roles.
            5. Update the MerkleDatabase with another serialized
               RoleList with a different name.
            6. Repeat 2.
            7. Repeat 3.
            8. Repeat 4.
        """
        factory = self._make_view_factory()

        # 1.
        first_list = identity_pb2.RoleList()
        role1 = first_list.roles.add()
        role1_name = "sawtooth.test.example1"
        role1.name = role1_name
        role1.policy_name = "this_is_an_example"
        state_root1 = self._tree.update(
            {_get_role_address(role1_name): first_list.SerializeToString()},
            virtual=False)

        # 2.
        view1 = factory.create_identity_view(state_hash=state_root1)
        self.assertEqual(
            view1.get_role(role1_name), role1,
            "IdentityView().get_role returns the correct Role by name.")

        # 3.
        self.assertIsNone(
            view1.get_role("Not-a-Role"),
            "IdentityView().get_role returns None if there is "
            "no Role with that name.")

        # 4.
        self.assertEqual(
            view1.get_roles(), [role1],
            "IdentityView().get_roles returns all the roles in State.")

        # 5.
        second_list = identity_pb2.RoleList()
        role2 = second_list.roles.add()
        role2_name = "sawtooth.test.example2"
        role2.name = role2_name
        role2.policy_name = "this_is_another_example"
        self._tree.set_merkle_root(merkle_root=state_root1)
        state_root2 = self._tree.update(
            {_get_role_address(role2_name): second_list.SerializeToString()},
            virtual=False)

        # 6.
        view2 = factory.create_identity_view(state_hash=state_root2)
        self.assertEqual(
            view2.get_role(role2_name), role2,
            "IdentityView().get_role returns the correct Role by name.")

        # 7.
        self.assertIsNone(
            view2.get_role("not-a-role2"),
            "IdentityView().get_role returns None for names that don't "
            "correspond to a Role.")

        # 8.
        self.assertEqual(
            view2.get_roles(), [role1, role2],
            "IdentityView().get_roles() returns all the Roles in "
            "alphabetical order by name.")

    def test_identityview_policy(self):
        """Tests get_policy and get_policies get the correct Policies
        and the IdentityViewFactory produces the correct view of the
        database.

        Notes:
            1. Create an empty MerkleDatabase and update it with one
               serialized PolicyList.
            2. Assert that get_policy returns that named Policy.
            3. Assert that get_policy returns None for a name that
               doesn't correspond to a Policy.
            4. Assert that all the Policies are returned by
               get_policies.
            5. Update the MerkleDatabase with another serialized
               PolicyList with a different name.
            6. Repeat 2.
            7. Repeat 3.
            8. Repeat 4.
        """
        factory = self._make_view_factory()

        # 1.
        first_list = identity_pb2.PolicyList()
        policy1 = first_list.policies.add()
        policy1_name = "deny_all_keys"
        policy1.name = policy1_name
        state_root1 = self._tree.update(
            {_get_policy_address(policy1_name):
                first_list.SerializeToString()},
            virtual=False)

        # 2.
        view1 = factory.create_identity_view(state_hash=state_root1)
        self.assertEqual(
            view1.get_policy(policy1_name), policy1,
            "IdentityView().get_policy returns the correct Policy "
            "by name.")

        # 3.
        self.assertIsNone(
            view1.get_policy("Not-a-Policy"),
            "IdentityView().get_policy returns None if "
            "there is no Policy with that name.")

        # 4.
        self.assertEqual(
            view1.get_policies(), [policy1],
            "IdentityView().get_policies returns all the "
            "policies in State.")

        # 5.
        second_list = identity_pb2.PolicyList()
        policy2 = second_list.policies.add()
        policy2_name = "accept_all_keys"
        policy2.name = policy2_name
        self._tree.set_merkle_root(merkle_root=state_root1)
        state_root2 = self._tree.update(
            {_get_policy_address(policy2_name):
                second_list.SerializeToString()},
            virtual=False)

        # 6.
        view2 = factory.create_identity_view(state_hash=state_root2)
        self.assertEqual(
            view2.get_policy(policy2_name), policy2,
            "IdentityView().get_policy returns the correct Policy "
            "by name.")

        # 7.
        self.assertIsNone(
            view2.get_policy("not-a-policy2"),
            "IdentityView().get_policy returns None for names that don't "
            "correspond to a Policy.")

        # 8.
        self.assertEqual(
            view2.get_policies(), [policy2, policy1],
            "IdentityView().get_policies returns all the Policies in "
            "alphabetical order by name.")