def test_merkle_trie_root_advance(self):
    """Setting a leaf is virtual until the merkle root is advanced.

    Verifies that `set` returns a new root distinct from the original,
    that the value is invisible under the old root, and visible once
    `set_merkle_root` advances to the new root.
    """
    value = {"name": "foo", "value": 1}
    orig_root = self.trie.get_merkle_root()
    new_root = self.trie.set(MerkleDatabase.hash("foo"), value)
    # The write must have produced a different root hash.
    self.assertNotEqual(orig_root, new_root)
    # Under the original root the key does not exist yet.
    with self.assertRaises(KeyError):
        self.trie.get(MerkleDatabase.hash("foo"))
    self.trie.set_merkle_root(new_root)
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(self.trie.get(MerkleDatabase.hash("foo")), value)
def run(self):
    """Worker loop: resolve address lists against a state snapshot.

    Blocks on the `_addresses` queue for `(context_id, state_hash,
    address_list)` work items, reads each address from a MerkleDatabase
    rooted at `state_hash` (missing addresses yield None), and puts
    `(context_id, [(address, value), ...])` on `_inflated_addresses`.
    Runs forever; intended to execute on its own thread.
    """
    while True:
        c_id, state_hash, address_list = self._addresses.get(block=True)
        tree = MerkleDatabase(self._database, state_hash)
        inflated = []
        for address in address_list:
            # EAFP: an address absent from the trie maps to None.
            try:
                leaf_value = tree.get(address)
            except KeyError:
                leaf_value = None
            inflated.append((address, leaf_value))
        self._inflated_addresses.put((c_id, inflated))
def _squash(state_root, context_ids):
    """Merge the writes of several contexts into one state trie update.

    Args:
        state_root (str): merkle root the update is based on.
        context_ids (list of str): contexts whose address/value dicts
            are merged; a duplicate address across contexts is an error.

    Returns:
        str: the new state hash after applying the merged updates
            (non-virtual, i.e. persisted).

    Raises:
        SquashException: if the same address appears in more than one
            context.
    """
    tree = MerkleDatabase(self._database, state_root)
    updates = dict()
    for c_id in context_ids:
        # Only the context lookup needs the shared lock.
        with self._shared_lock:
            context = self._contexts[c_id]
        # Hoist the dict call: the original fetched it once for the
        # duplicate check and again for the merge.
        address_value_dict = context.get_address_value_dict()
        for add in address_value_dict:
            if add in updates:
                raise SquashException(
                    "Duplicate address {} in context {}".format(
                        add, c_id))
        # Values are futures; resolve them before writing to the trie.
        updates.update({k: v.result()
                        for k, v in address_value_dict.items()})
    state_hash = tree.update(updates, virtual=False)
    return state_hash
def test_merkle_trie_delete(self):
    """A deleted leaf is gone once the root advances past the delete.

    Sets a value, confirms it is readable under the new root, deletes
    it, and confirms a KeyError under the post-delete root.
    """
    value = {"name": "bar", "value": 1}
    new_root = self.trie.set(MerkleDatabase.hash("bar"), value)
    self.trie.set_merkle_root(new_root)
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(self.trie.get(MerkleDatabase.hash("bar")), value)
    del_root = self.trie.delete(MerkleDatabase.hash("bar"))
    self.trie.set_merkle_root(del_root)
    with self.assertRaises(KeyError):
        self.trie.get(MerkleDatabase.hash("bar"))
def commit_context(self, context_id_list):
    """ Part of the interface to the Executor
    Args:
        context_id_list (list of str): context ids to merge and commit;
            all must exist and share the same merkle root.
    Returns:
        state_hash (str): the new state hash after the context_id_list
                          has been committed
    Raises:
        CommitException: if a context id is unknown, the contexts do
            not share one merkle root, or two contexts wrote the same
            address.
    """
    # Generator instead of a list: short-circuits on first miss.
    if any(c_id not in self._contexts for c_id in context_id_list):
        raise CommitException("Context Id not in contexts")
    first_id = context_id_list[0]
    merkle_root = self._contexts[first_id].merkle_root
    if not all(self._contexts[c_id].merkle_root == merkle_root
               for c_id in context_id_list):
        raise CommitException(
            "MerkleRoots not all equal, yet asking to merge")
    tree = MerkleDatabase(self._database, merkle_root)
    merged_updates = {}
    for c_id in context_id_list:
        # Lock only around the lookup/removal of the shared dict entry.
        with self._shared_lock:
            context = self._contexts[c_id]
            del self._contexts[c_id]
        # Hoisted: the original called this accessor twice per context.
        # iterkeys()/iteritems() were Python-2-only; use Py3 views to
        # match the rest of the file.
        writable = context.get_writable_address_value_dict()
        for k in writable:
            if k in merged_updates:
                raise CommitException(
                    "Duplicate address {} in context {}".format(k, c_id))
        merged_updates.update(writable)
    # Values are futures; resolve them before writing to the trie.
    add_value_dict = {
        address: value.result()
        for address, value in merged_updates.items()
    }
    return tree.update(set_items=add_value_dict)
class ClientStateGetRequestHandler(object):
    """Serves ClientStateGetRequest messages: reads one address from the
    merkle trie at the requested root and replies with the value, or an
    error status (NORESOURCE for missing root/leaf, NONLEAF for a
    non-leaf node, ERROR for undecodable requests).
    """

    def __init__(self, database):
        # Shared trie view; re-rooted per request via set_merkle_root.
        self._tree = MerkleDatabase(database)

    def handle(self, message, responder):
        """Decode the request, look up the address, and send a
        CLIENT_STATE_GET_RESPONSE back through `responder`.
        """
        error = False
        status = None
        request = client_pb2.ClientStateGetRequest()
        try:
            request.ParseFromString(
                message.content)
            # KeyError here means the requested merkle root is unknown.
            self._tree.set_merkle_root(request.merkle_root)
        except KeyError as e:
            status = client_pb2.ClientStateGetResponse.NORESOURCE
            LOGGER.info(e)
            error = True
        except DecodeError:
            LOGGER.info("Expected protobuf of class %s failed to "
                        "deserialize", request)
            error = True
        if error:
            # DecodeError path leaves status None, so fall back to ERROR.
            response = client_pb2.ClientStateGetResponse(
                status=status or client_pb2.ClientStateGetResponse.ERROR)
        else:
            address = request.address
            try:
                value = self._tree.get(address)
                status = client_pb2.ClientStateGetResponse.OK
            except KeyError:
                # No leaf stored at this address under the current root.
                status = client_pb2.ClientStateGetResponse.NORESOURCE
                LOGGER.debug("No entry at state address %s", address)
                error = True
            except ValueError:
                # Address resolves to an interior node, not a leaf.
                status = client_pb2.ClientStateGetResponse.NONLEAF
                LOGGER.debug("Node at state address %s is a nonleaf",
                             address)
                error = True
            response = client_pb2.ClientStateGetResponse(status=status)
            # Only attach a value on the success path.
            if not error:
                response.value = value
        responder.send(validator_pb2.Message(
            sender=message.sender,
            message_type=validator_pb2.Message.CLIENT_STATE_GET_RESPONSE,
            correlation_id=message.correlation_id,
            content=response.SerializeToString()))
class ClientStateListRequestHandler(object):
    """Serves ClientStateListRequest messages: lists all leaves under an
    address prefix at the requested merkle root, replying with entries,
    or a status of NORESOURCE (unknown root / empty prefix) or ERROR
    (undecodable request).
    """

    def __init__(self, database):
        # Shared trie view; re-rooted per request via set_merkle_root.
        self._tree = MerkleDatabase(database)

    def handle(self, message, responder):
        """Decode the request, collect leaves under the prefix, and send
        a CLIENT_STATE_LIST_RESPONSE back through `responder`.
        """
        error = False
        status = None
        request = client_pb2.ClientStateListRequest()
        try:
            request.ParseFromString(
                message.content)
            # KeyError here means the requested merkle root is unknown.
            self._tree.set_merkle_root(request.merkle_root)
        except KeyError as e:
            status = client_pb2.ClientStateListResponse.NORESOURCE
            LOGGER.info(e)
            error = True
        except DecodeError:
            LOGGER.info("Expected protobuf of class %s failed to "
                        "deserialize", request)
            error = True
        if error:
            # DecodeError path leaves status None, so fall back to ERROR.
            response = client_pb2.ClientStateListResponse(
                status=status or client_pb2.ClientStateListResponse.ERROR)
        else:
            prefix = request.prefix
            leaves = self._tree.leaves(prefix)
            if len(leaves) == 0:
                # Nothing stored under this prefix at this root.
                status = client_pb2.ClientStateListResponse.NORESOURCE
                response = client_pb2.ClientStateListResponse(status=status)
            else:
                status = client_pb2.ClientStateListResponse.OK
                entries = [
                    Entry(address=a, data=v) for a, v in leaves.items()]
                response = client_pb2.ClientStateListResponse(
                    status=status,
                    entries=entries)
        responder.send(validator_pb2.Message(
            sender=message.sender,
            message_type=validator_pb2.Message.CLIENT_STATE_LIST_RESPONSE,
            correlation_id=message.correlation_id,
            content=response.SerializeToString()))
def test_merkle_trie_update(self):
    """Bulk `update` of a sample of existing keys is visible under the
    new root.

    Seeds 1000 random keys one `set` at a time, then overwrites 50 of
    them in a single `update` call and verifies the updated values.
    """
    value = ''.join(
        random.choice(string.ascii_lowercase) for _ in range(512))
    keys = []
    # Loop index was unused; use the conventional `_`.
    for _ in range(1000):
        key = ''.join(
            random.choice(string.ascii_lowercase) for _ in range(10))
        keys.append(key)
        # Renamed from `hash`, which shadowed the builtin.
        key_hash = MerkleDatabase.hash(key)
        new_root = self.trie.set(key_hash, {key: value})
        self.trie.set_merkle_root(new_root)
    set_items = {}
    for key in random.sample(keys, 50):
        key_hash = MerkleDatabase.hash(key)
        set_items[key_hash] = {key: 5.0}
    update_root = self.trie.update(set_items)
    self.trie.set_merkle_root(update_root)
    for address in set_items:
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(self.trie.get(address), set_items[address])
def get_first_root(self):
    """Return the merkle root of the initial state.

    The root is computed lazily from the database on first call and
    memoized in `_first_merkle_root`; later calls return the cached
    value without touching the database.
    """
    if self._first_merkle_root is None:
        self._first_merkle_root = MerkleDatabase(
            self._database).get_merkle_root()
    return self._first_merkle_root
def setUp(self):
    """Create a fresh no-lock LMDB ('n' mode) and a merkle trie over it."""
    db_path = "/home/vagrant/merkle.lmdb"
    self.lmdb = lmdb_nolock_database.LMDBNoLockDatabase(db_path, 'n')
    self.trie = MerkleDatabase(self.lmdb)
class TestSawtoothMerkleTrie(unittest.TestCase):
    """Tests for MerkleDatabase: root advancement, delete, and bulk update.

    Each test runs against a fresh no-lock LMDB file; fixes over the
    original: deprecated `assertEquals` replaced with `assertEqual`,
    the builtin-shadowing local `hash` renamed, and the previously
    unused `orig_root` now asserted against.
    """

    def setUp(self):
        self.lmdb = lmdb_nolock_database.LMDBNoLockDatabase(
            "/home/vagrant/merkle.lmdb", 'n')
        self.trie = MerkleDatabase(self.lmdb)

    def tearDown(self):
        self.trie.close()

    def test_merkle_trie_root_advance(self):
        """A `set` is virtual until the root is advanced."""
        value = {"name": "foo", "value": 1}
        orig_root = self.trie.get_merkle_root()
        new_root = self.trie.set(MerkleDatabase.hash("foo"), value)
        # The write must have produced a different root hash.
        self.assertNotEqual(orig_root, new_root)
        with self.assertRaises(KeyError):
            self.trie.get(MerkleDatabase.hash("foo"))
        self.trie.set_merkle_root(new_root)
        self.assertEqual(self.trie.get(MerkleDatabase.hash("foo")), value)

    def test_merkle_trie_delete(self):
        """A deleted leaf raises KeyError under the post-delete root."""
        value = {"name": "bar", "value": 1}
        new_root = self.trie.set(MerkleDatabase.hash("bar"), value)
        self.trie.set_merkle_root(new_root)
        self.assertEqual(self.trie.get(MerkleDatabase.hash("bar")), value)
        del_root = self.trie.delete(MerkleDatabase.hash("bar"))
        self.trie.set_merkle_root(del_root)
        with self.assertRaises(KeyError):
            self.trie.get(MerkleDatabase.hash("bar"))

    def test_merkle_trie_update(self):
        """Bulk `update` of 50 of 1000 seeded keys is visible."""
        value = ''.join(
            random.choice(string.ascii_lowercase) for _ in range(512))
        keys = []
        for _ in range(1000):
            key = ''.join(
                random.choice(string.ascii_lowercase) for _ in range(10))
            keys.append(key)
            # Renamed from `hash`, which shadowed the builtin.
            key_hash = MerkleDatabase.hash(key)
            new_root = self.trie.set(key_hash, {key: value})
            self.trie.set_merkle_root(new_root)
        set_items = {}
        for key in random.sample(keys, 50):
            key_hash = MerkleDatabase.hash(key)
            set_items[key_hash] = {key: 5.0}
        update_root = self.trie.update(set_items)
        self.trie.set_merkle_root(update_root)
        for address in set_items:
            self.assertEqual(self.trie.get(address), set_items[address])
def __init__(self, database):
    """Wrap `database` in a MerkleDatabase used for state reads.

    Args:
        database: backing key/value store handed to MerkleDatabase.
    """
    self._tree = MerkleDatabase(database)