Ejemplo n.º 1
0
    def get(self, context_id, address_list):
        """Get the values associated with list of addresses, for a specific
        context referenced by context_id.

        Args:
            context_id (str): the return value of create_context, referencing
                a particular context.
            address_list (list): a list of address strs

        Returns:
            values_list (list): a list of (address, value) tuples

        Raises:
            AuthorizationException: Raised when an address in address_list is
                not authorized either by not being in the inputs for the
                txn associated with this context, or it is under a namespace
                but the characters that are under the namespace are not valid
                address characters.
        """

        # An unknown context id is not an error; the caller gets no values.
        if context_id not in self._contexts:
            return []
        for add in address_list:
            if not self.address_is_valid(address=add):
                raise AuthorizationException(address=add)

        context = self._contexts[context_id]

        # Split the request: addresses the context already holds vs. those
        # that must be resolved from base contexts or from state.
        addresses_in_ctx = [add for add in address_list if add in context]
        addresses_not_in_ctx = list(set(address_list) - set(addresses_in_ctx))

        values = context.get(addresses_in_ctx)
        values_list = list(zip(addresses_in_ctx, values))
        if addresses_not_in_ctx:
            # Validate the addresses that won't be validated by a direct get on
            # the context.
            for address in addresses_not_in_ctx:
                context.validate_read(address)
            # Search this context's chain of base contexts for the rest.
            address_values, reads = self._find_address_values_in_chain(
                base_contexts=[context_id],
                addresses_to_find=addresses_not_in_ctx)

            values_list.extend(address_values)

            # Anything still unresolved is read from state at the context's
            # merkle root; missing addresses yield a value of None.
            if reads:
                tree = MerkleDatabase(self._database, context.merkle_root)
                add_values = []
                for add in reads:
                    value = None
                    try:
                        value = tree.get(add)
                    except KeyError:
                        # The address is not in the radix tree/merkle tree
                        pass
                    add_values.append((add, value))
                values_list.extend(add_values)

            # Restore the caller's requested address ordering.
            values_list.sort(key=lambda x: address_list.index(x[0]))

        return values_list
Ejemplo n.º 2
0
    def __init__(self):
        # NOTE(review): hard-coded path instead of tempfile.mkdtemp();
        # presumably /tmp/sawtooth must already exist — confirm before reuse.
        self.dir = '/tmp/sawtooth'  # tempfile.mkdtemp()
        self.file = os.path.join(self.dir, 'merkle.lmdb')

        # 'n' is passed straight through to LMDBNoLockDatabase — presumably
        # a "create new" flag; verify against its constructor.
        self.lmdb = lmdb_nolock_database.LMDBNoLockDatabase(self.file, 'n')

        # Merkle trie under test, backed by the LMDB database.
        self.trie = MerkleDatabase(self.lmdb)
    def compute_state_hashes_wo_scheduler(self):
        """Creates a state hash from the state updates from each txn in a
        valid batch.

        Returns state_hashes (list of str): The merkle roots from state
            changes in 1 or more blocks in the yaml file.

        """

        tree = MerkleDatabase(database=DictDatabase())
        state_hashes = []
        updates = {}
        for batch in self._batches:
            result = self._batch_results[batch.header_signature]
            if result.is_valid:
                for txn in batch.transactions:
                    _, address_values = self._txn_execution[
                        txn.header_signature]
                    # Execution is entirely serial, so a later write to an
                    # address intentionally overwrites any earlier one.
                    for pair in address_values:
                        updates.update(pair)
            # Yaml files that carry explicit state roots produce one merkle
            # root per such batch.
            if result.state_hash is not None:
                root = tree.update(set_items=updates, virtual=False)
                tree.set_merkle_root(merkle_root=root)
                state_hashes.append(root)
        # No explicit roots in the yaml: emit a single virtual root for all
        # of the accumulated updates.
        if not state_hashes:
            state_hashes.append(tree.update(set_items=updates))
        return state_hashes
Ejemplo n.º 4
0
    def setUp(self):
        # Fresh temporary directory per test; the LMDB file lives inside it.
        self.dir = tempfile.mkdtemp()
        self.file = os.path.join(self.dir, 'merkle.lmdb')

        # 'n' is passed straight to LMDBNoLockDatabase — presumably a
        # "create new" flag; verify against its constructor.
        self.lmdb = lmdb_nolock_database.LMDBNoLockDatabase(self.file, 'n')

        # Merkle trie under test, backed by the fresh database.
        self.trie = MerkleDatabase(self.lmdb)
Ejemplo n.º 5
0
def make_db_and_store(size=3, start='a'):
    """
    Creates and returns three related objects for testing:
        * database - dict database with evolving state
        * store - blocks with root hashes corresponding to that state
        * roots - list of root hashes used in order
    With defaults, the values at the three roots look like this:
        * 0 - {'a': b'1'}
        * 1 - {'a': b'2', 'b': b'4'}
        * 2 - {'a': b'3', 'b': b'5', 'c': b'7'}
    """
    database = DictDatabase()
    # FIX: removed a stray trailing semicolon (un-Pythonic; the identical
    # function elsewhere in this file does not have it).
    store = MockBlockStore(size=0)
    roots = []

    merkle = MerkleDatabase(database)
    data = {}

    for i in range(size):
        # Bump every existing value by one, then add one new key per block.
        for k, v in data.items():
            data[k] = str(int(v) + 1).encode()
        data[_increment_key(start, i)] = str(i * size + 1).encode()

        # Commit the state and record its root alongside a new block.
        root = merkle.update(data, virtual=False)
        roots.append(root)
        store.add_block(str(i), root)

    return database, store, roots
Ejemplo n.º 6
0
    def get(self, context_id, address_list):
        """Get the values associated with list of addresses, for a specific
        context referenced by context_id.

        Args:
            context_id (str): the return value of create_context, referencing
                a particular context.
            address_list (list): a list of address strs

        Returns:
            values_list (list): a list of (address, value) tuples

        Raises:
            AuthorizationException: Raised when an address in address_list is
                not authorized either by not being in the inputs for the
                txn associated with this context, or it is under a namespace
                but the characters that are under the namespace are not valid
                address characters.
        """

        # An unknown context id is not an error; the caller gets no values.
        if context_id not in self._contexts:
            return []
        for add in address_list:
            if not self.address_is_valid(address=add):
                raise AuthorizationException(address=add)

        context = self._contexts[context_id]

        # Split the request: addresses the context already holds vs. those
        # that must be resolved from base contexts or from state.
        addresses_in_ctx = [add for add in address_list if add in context]
        addresses_not_in_ctx = list(set(address_list) - set(addresses_in_ctx))

        values = context.get(addresses_in_ctx)
        values_list = list(zip(addresses_in_ctx, values))
        if addresses_not_in_ctx:
            # Validate the addresses that won't be validated by a direct get on
            # the context.
            for address in addresses_not_in_ctx:
                context.validate_read(address)
            address_values, reads = self._find_address_values_in_chain(
                base_contexts=[context_id],
                addresses_to_find=addresses_not_in_ctx)

            values_list.extend(address_values)

            # Anything still unresolved is read from state at the context's
            # merkle root; missing addresses yield a value of None.
            if reads:
                tree = MerkleDatabase(self._database, context.merkle_root)
                add_values = []
                for add in reads:
                    value = None
                    try:
                        value = tree.get(add)
                    except KeyError:
                        # The address is not in the radix tree/merkle tree
                        pass
                    add_values.append((add, value))
                values_list.extend(add_values)

            # Restore the caller's requested ordering. PERF FIX: the
            # original used list.index inside the key function, which is
            # O(n) per element (O(n^2) total). Precompute a first-occurrence
            # index map so the sort key is an O(1) lookup.
            order = {}
            for idx, addr in enumerate(address_list):
                order.setdefault(addr, idx)
            values_list.sort(key=lambda x: order[x[0]])

        return values_list
Ejemplo n.º 7
0
def make_db_and_store(size=3, start='a'):
    """
    Creates and returns three related objects for testing:
        * database - dict database with evolving state
        * store - blocks with root hashes corresponding to that state
        * roots - list of root hashes used in order
    With defaults, the values at the three roots look like this:
        * 0 - {'a': b'1'}
        * 1 - {'a': b'2', 'b': b'4'}
        * 2 - {'a': b'3', 'b': b'5', 'c': b'7'}
    """
    database = DictDatabase()
    store = MockBlockStore(size=0)
    roots = []

    merkle = MerkleDatabase(database)
    data = {}

    for block_num in range(size):
        # Increment every existing value by one ...
        data = {key: str(int(val) + 1).encode()
                for key, val in data.items()}
        # ... then introduce one fresh key for this block.
        new_key = _increment_key(start, block_num)
        data[new_key] = str(block_num * size + 1).encode()

        # Persist the state and pair the root with a new block.
        root = merkle.update(data, virtual=False)
        roots.append(root)
        store.add_block(str(block_num), root)

    return database, store, roots
Ejemplo n.º 8
0
    def setUp(self):
        # Fresh temporary directory per test; the LMDB file lives inside it.
        self.dir = tempfile.mkdtemp()
        self.file = os.path.join(self.dir, 'merkle.lmdb')

        # Native LMDB environment with a 120 MiB map size.
        self.lmdb = NativeLmdbDatabase(self.file, _size=120 * 1024 * 1024)

        # Merkle trie under test, backed by the fresh database.
        self.trie = MerkleDatabase(self.lmdb)
Ejemplo n.º 9
0
    def setUp(self):
        # Fresh temporary directory per test.
        self._temp_dir = tempfile.mkdtemp()

        # Native LMDB environment (10 MiB map size) backing the merkle tree.
        self._database = NativeLmdbDatabase(
            os.path.join(self._temp_dir, 'test_identity_view.lmdb'),
            _size=10 * 1024 * 1024)
        self._tree = MerkleDatabase(self._database)
Ejemplo n.º 10
0
 def run(self):
     """Worker loop for the context reader thread.

     Pulls (context_id, state_hash, address_list) tuples from the input
     queue, reads each address from the merkle tree rooted at state_hash,
     and puts (context_id, [(address, value), ...]) on the output queue.
     Stops when the shutdown sentinel is received.

     FIX: removed commented-out LOGGER lines and a triple-quoted
     "for testing only" block — the latter was a no-op string expression
     evaluated on every iteration; behavior is unchanged.
     """
     while True:
         context_state_addresslist_tuple = self._addresses.get(block=True)
         if context_state_addresslist_tuple is _SHUTDOWN_SENTINEL:
             break
         c_id, state_hash, address_list = context_state_addresslist_tuple
         tree = MerkleDatabase(self._database, state_hash)
         return_values = []
         for address in address_list:
             # Addresses absent from the tree are reported as None rather
             # than propagating the KeyError.
             value = None
             try:
                 value = tree.get(address)
             except KeyError:
                 pass
             return_values.append((address, value))
         self._inflated_addresses.put((c_id, return_values))
Ejemplo n.º 11
0
    def test_squash(self):
        """Tests that squashing a context based on state from other
        contexts will result in the same merkle hash as updating the
        merkle tree with the same data.

        Notes:
            Set up the context

            Test:
                1) Make set calls on several of the addresses.
                2) Squash the context to get a new state hash.
                3) Apply all of the aggregate sets from all
                of the contexts, to another database with a merkle tree.
                4) Assert that the state hashes are the same.
                5) Assert that the state deltas have been stored
        """
        # 1)
        context_id = self._setup_context()
        self.context_manager.set(
            context_id,
            [{self._create_address(a): v} for a, v in
             [('yyyy', b'2'),
              ('tttt', b'4')]])

        # 2)
        squash = self.context_manager.get_squash_handler()
        resulting_state_hash = squash(self.first_state_hash, [context_id],
                                      persist=True, clean_up=True)

        # 3) The expected final state: everything written during setup
        # plus the two sets made above.
        final_state_to_update = {self._create_address(a): v for a, v in
                                 [('llaa', b'1'),
                                  ('aall', b'2'),
                                  ('nnnn', b'3'),
                                  ('zzzz', b'9'),
                                  ('yyyy', b'2'),
                                  ('tttt', b'4'),
                                  ('qqqq', b'13'),
                                  ('oooo', b'25'),
                                  ('oozz', b'26'),
                                  ('zzoo', b'27'),
                                  ('ppoo', b'28'),
                                  ('aeio', b'29')]}

        # Applying the same data to an independent merkle tree must yield
        # the same root as the squash did.
        test_merkle_tree = MerkleDatabase(self.database_results)
        test_resulting_state_hash = test_merkle_tree.update(
            final_state_to_update, virtual=False)
        # 4)
        self.assertEqual(resulting_state_hash, test_resulting_state_hash)
        state_changes = self.state_delta_store.get_state_deltas(
            resulting_state_hash)

        # 5) Every set must have been recorded as a SET state delta.
        for addr, value in final_state_to_update.items():
            expected_state_change = StateChange(
                address=addr,
                value=value,
                type=StateChange.SET)

            self.assertTrue(expected_state_change in state_changes)
Ejemplo n.º 12
0
        def _squash(state_root, context_ids, persist, clean_up=False):
            """Merge the state from context_ids into the merkle tree rooted
            at state_root and return the resulting state hash (state_root
            itself when there is nothing to write).
            """
            merkle = MerkleDatabase(self._database, state_root)
            updates = {}
            for ctx_id in context_ids:
                ctx_state = self._contexts[ctx_id].get_state()
                # Two contexts writing the same address cannot be merged.
                for address in ctx_state.keys():
                    if address in updates:
                        raise SquashException(
                            "Duplicate address {} in context {}".format(
                                address, ctx_id))

                # Only futures that resolved to a real value are committed.
                for address, future in ctx_state.items():
                    resolved = future.result()
                    if resolved is not None:
                        updates[address] = resolved

            if not updates:
                return state_root

            state_hash = merkle.update(updates, virtual=not persist)
            if persist:
                # Squashing consumed these contexts; drop them together
                # with every base context they were built on.
                stale_ids = []
                for ctx_id in context_ids:
                    stale_ids += self._contexts[ctx_id].base_context_ids
                self.delete_context(stale_ids + context_ids)
            return state_hash
Ejemplo n.º 13
0
        def _squash(state_root, context_ids, persist, clean_up):
            """Fold the sets and deletes from context_ids (and every base
            context reachable from them) into the merkle tree rooted at
            state_root; return the resulting state hash, or state_root
            itself if nothing changed.
            """
            # Breadth-first walk over the context graph, newest first.
            contexts_in_chain = deque()
            contexts_in_chain.extend(context_ids)
            context_ids_already_searched = []
            context_ids_already_searched.extend(context_ids)

            # There is only one exit condition and that is when all the
            # contexts have been accessed once.
            updates = dict()
            deletes = set()
            while contexts_in_chain:
                current_c_id = contexts_in_chain.popleft()
                current_context = self._contexts[current_c_id]
                if not current_context.is_read_only():
                    current_context.make_read_only()

                addresses_w_values = current_context.get_all_if_set()
                for add, val in addresses_w_values.items():
                    # Since we are moving backwards through the graph of
                    # contexts, only update if the address hasn't been set
                    # or deleted
                    if add not in updates and add not in deletes:
                        updates[add] = val

                addresses_w_values = current_context.get_all_if_deleted()
                for add, _ in addresses_w_values.items():
                    # Since we are moving backwards through the graph of
                    # contexts, only add to deletes if the address hasn't been
                    # previously deleted or set in the graph
                    if add not in updates and add not in deletes:
                        deletes.add(add)

                # Queue any not-yet-visited base contexts for the walk.
                for c_id in current_context.base_contexts:
                    if c_id not in context_ids_already_searched:
                        contexts_in_chain.append(c_id)
                        context_ids_already_searched.append(c_id)

            tree = MerkleDatabase(self._database, state_root)

            # filter the delete list to just those items in the tree
            deletes = [addr for addr in deletes if addr in tree]

            if not updates and not deletes:
                return state_root

            virtual = not persist
            state_hash = tree.update(updates, deletes, virtual=virtual)
            if persist:
                # save the state changes to the state_delta_store
                changes = [StateChange(address=addr,
                                       value=value,
                                       type=StateChange.SET)
                           for addr, value in updates.items()] +\
                          [StateChange(address=addr,
                                       type=StateChange.DELETE)
                           for addr in deletes]
                self._state_delta_store.save_state_deltas(state_hash, changes)
            if clean_up:
                self.delete_contexts(context_ids_already_searched)
            return state_hash
Ejemplo n.º 14
0
def make_db_and_store(size=3):
    """Create a DictDatabase, a MockBlockStore, and the list of merkle
    roots produced by committing one block of state per iteration.

    Keys are 70-character zero-padded hex strings; root number ``i``
    (1-based) contains the first ``i`` keys, each valued
    ``str(i + 2 * key_idx)`` encoded as bytes.

    Returns:
        tuple: (database, store, roots)
    """
    database = DictDatabase()
    store = MockBlockStore(size=0)
    roots = []

    merkle = MerkleDatabase(database)
    # FIX: removed a dead `data = {}` here — it was unconditionally
    # reassigned at the top of every loop iteration below.

    # Create all the keys that will be used. Keys are zero-padded hex strings
    # starting with '1'.
    keys = [format(i, 'x').zfill(70) for i in range(1, size + 1)]

    for i in range(1, size + 1):
        # Construct the state for this root
        data = {}
        for key_idx in range(i):
            key = keys[key_idx]
            # Calculate unique values based on the key and root
            val = i + (2 * key_idx)
            data[key] = str(val).encode()

        root = merkle.update(data, virtual=False)
        roots.append(root)
        store.add_block(str(i), root)

    return database, store, roots
Ejemplo n.º 15
0
    def setUp(self):
        # Fresh temporary directory per test.
        self._temp_dir = tempfile.mkdtemp()

        # Native LMDB environment (10 MiB map size) configured with the
        # merkle tree's index set, backing the tree under test.
        self._database = NativeLmdbDatabase(
            os.path.join(self._temp_dir, 'test_identity_view.lmdb'),
            indexes=MerkleDatabase.create_index_configuration(),
            _size=10 * 1024 * 1024)
        self._tree = MerkleDatabase(self._database)
Ejemplo n.º 16
0
    def setUp(self):
        # Fresh temporary directory per test.
        self._temp_dir = tempfile.mkdtemp()

        # Native LMDB environment (10 MiB map size) configured with the
        # merkle tree's index set, backing the tree under test.
        self._database = NativeLmdbDatabase(
            os.path.join(self._temp_dir, 'test_identity_view.lmdb'),
            indexes=MerkleDatabase.create_index_configuration(),
            _size=10 * 1024 * 1024)
        self._tree = MerkleDatabase(self._database)
Ejemplo n.º 17
0
    def setUp(self):
        # Fresh temporary directory per test; the LMDB file lives inside it.
        self.dir = tempfile.mkdtemp()
        self.file = os.path.join(self.dir, 'merkle.lmdb')

        # Native LMDB environment (120 MiB map size) configured with the
        # merkle tree's index set.
        self.lmdb = NativeLmdbDatabase(
            self.file,
            indexes=MerkleDatabase.create_index_configuration(),
            _size=120 * 1024 * 1024)

        # Merkle trie under test, backed by the fresh database.
        self.trie = MerkleDatabase(self.lmdb)
Ejemplo n.º 18
0
    def setUp(self):
        # Fresh temporary directory per test; the LMDB file lives inside it.
        self.dir = tempfile.mkdtemp()
        self.file = os.path.join(self.dir, 'merkle.lmdb')

        # Native LMDB environment (120 MiB map size) configured with the
        # merkle tree's index set.
        self.lmdb = NativeLmdbDatabase(
            self.file,
            indexes=MerkleDatabase.create_index_configuration(),
            _size=120 * 1024 * 1024)

        # Merkle trie under test, backed by the fresh database.
        self.trie = MerkleDatabase(self.lmdb)
Ejemplo n.º 19
0
    def test_squash(self):
        """Tests that squashing a context based on state from other
        contexts will result in the same merkle hash as updating the
        merkle tree with the same data.

        Notes:
            Set up the context

            Test:
                1) Make set calls on several of the addresses.
                2) Squash the context to get a new state hash.
                3) Apply all of the aggregate sets from all
                of the contexts, to another database with a merkle tree.
                4) Assert that the state hashes are the same.
                5) Assert that the state deltas have been stored
        """
        # 1) Make set calls on several of the addresses.
        context_id = self._setup_context()
        set_pairs = [('yyyy', b'2'), ('tttt', b'4')]
        self.context_manager.set(
            context_id,
            [{self._create_address(key): val} for key, val in set_pairs])

        # 2) Squash the context to get a new state hash.
        squash = self.context_manager.get_squash_handler()
        resulting_state_hash = squash(
            self.first_state_hash, [context_id],
            persist=True, clean_up=True)

        # 3) Apply the aggregate sets to an independent merkle tree.
        expected_pairs = [
            ('llaa', b'1'), ('aall', b'2'), ('nnnn', b'3'), ('zzzz', b'9'),
            ('yyyy', b'2'), ('tttt', b'4'), ('qqqq', b'13'), ('oooo', b'25'),
            ('oozz', b'26'), ('zzoo', b'27'), ('ppoo', b'28'),
            ('aeio', b'29'),
        ]
        final_state_to_update = {
            self._create_address(key): val for key, val in expected_pairs}

        test_merkle_tree = MerkleDatabase(self.database_results)
        test_resulting_state_hash = test_merkle_tree.update(
            final_state_to_update, virtual=False)

        # 4) Both roots must match.
        self.assertEqual(resulting_state_hash, test_resulting_state_hash)
        state_changes = self.state_delta_store.get_state_deltas(
            resulting_state_hash)

        # 5) Every set must have been recorded as a SET state delta.
        for addr, value in final_state_to_update.items():
            expected_state_change = StateChange(
                address=addr, value=value, type=StateChange.SET)

            self.assertTrue(expected_state_change in state_changes)
Ejemplo n.º 20
0
    def commit_context(self, context_id_list, virtual):
        """ Only used in a test ---
        Commits the state from the contexts referred to in context_id_list
        to the merkle tree.

        Args:
            context_id_list (list of str): The context ids with state to
                commit to the merkle tree.
            virtual (bool): True if the data in contexts shouldn't be
                written to the merkle tree, but just return a merkle root.

        Returns:
            state_hash (str): the new state hash after the context_id_list
                              has been committed

        Raises:
            CommitException: if a context id is unknown, the contexts do
                not all share the same merkle root, or two contexts wrote
                the same address.
        """
        if any(c_id not in self._contexts for c_id in context_id_list):
            raise CommitException("Context Id not in contexts")
        first_id = context_id_list[0]

        # All contexts must be rooted at the same state to be merged.
        if not all(self._contexts[first_id].merkle_root
                   == self._contexts[c_id].merkle_root
                   for c_id in context_id_list):
            raise CommitException(
                "MerkleRoots not all equal, yet asking to merge")

        merkle_root = self._contexts[first_id].merkle_root
        tree = MerkleDatabase(self._database, merkle_root=merkle_root)
        updates = dict()
        for c_id in context_id_list:
            context = self._contexts[c_id]
            # An address written by two contexts cannot be merged safely.
            for add in context.get_state().keys():
                if add in updates:
                    raise CommitException(
                        "Duplicate address {} in context {}".format(add, c_id))

            # Only futures that resolved to a real value are committed.
            effective_updates = {}
            for k, val_fut in context.get_state().items():
                value = val_fut.result()
                if value is not None:
                    effective_updates[k] = value

            updates.update(effective_updates)

        # BUG FIX: the `virtual` argument was accepted and documented but
        # ignored (the call hard-coded virtual=False), so even "virtual"
        # commits were written to the merkle tree. Honor the parameter.
        state_hash = tree.update(updates, virtual=virtual)
        # clean up all contexts that are involved in being squashed.
        base_c_ids = []
        for c_id in context_id_list:
            base_c_ids += self._contexts[c_id].base_context_ids
        all_context_ids = base_c_ids + context_id_list
        self.delete_context(all_context_ids)

        return state_hash
Ejemplo n.º 21
0
    def test_empty_batch_file_should_produce_block(
        self, mock_scheduler_complete
    ):
        """
        In this case, the genesis batch, even with an empty list of batches,
        should produce a genesis block.
        Also:
         - the genesis.batch file should be deleted
         - the block_chain_id file should be created and populated
        """
        genesis_file = self._with_empty_batch_file()
        block_store = self.make_block_store()
        block_manager = BlockManager()
        block_manager.add_commit_store(block_store)

        # Real state database and merkle tree; everything else is mocked.
        state_database = NativeLmdbDatabase(
            os.path.join(self._temp_dir, 'test_genesis.lmdb'),
            indexes=MerkleDatabase.create_index_configuration(),
            _size=10 * 1024 * 1024)
        merkle_db = MerkleDatabase(state_database)

        # The mocked context manager only needs to hand out a squash
        # handler and the initial (empty-tree) state root.
        ctx_mgr = Mock(name='ContextManager')
        ctx_mgr.get_squash_handler.return_value = Mock()
        ctx_mgr.get_first_root.return_value = merkle_db.get_merkle_root()

        txn_executor = Mock(name='txn_executor')
        completer = Mock('completer')
        completer.add_block = Mock('add_block')

        genesis_ctrl = GenesisController(
            context_manager=ctx_mgr,
            transaction_executor=txn_executor,
            completer=completer,
            block_store=block_store,
            state_view_factory=StateViewFactory(state_database),
            identity_signer=self._signer,
            block_manager=block_manager,
            data_dir=self._temp_dir,
            config_dir=self._temp_dir,
            chain_id_manager=ChainIdManager(self._temp_dir),
            batch_sender=Mock('batch_sender'),
            receipt_store=MagicMock())

        on_done_fn = Mock(return_value='')
        genesis_ctrl.start(on_done_fn)

        # The genesis batch file must be consumed ...
        self.assertEqual(False, os.path.exists(genesis_file))

        # ... and exactly one genesis block produced and completed.
        self.assertEqual(True, block_store.chain_head is not None)
        self.assertEqual(1, on_done_fn.call_count)
        self.assertEqual(1, completer.add_block.call_count)
        self.assertEqual(block_store.chain_head.identifier,
                         self._read_block_chain_id())
Ejemplo n.º 22
0
    def test_empty_batch_file_should_produce_block(
        self, mock_scheduler_complete
    ):
        """
        In this case, the genesis batch, even with an empty list of batches,
        should produce a genesis block.
        Also:
         - the genesis.batch file should be deleted
         - the block_chain_id file should be created and populated
        """
        genesis_file = self._with_empty_batch_file()
        block_store = self.make_block_store()
        block_manager = BlockManager()
        block_manager.add_commit_store(block_store)

        # Real state database and merkle tree; everything else is mocked.
        state_database = NativeLmdbDatabase(
            os.path.join(self._temp_dir, 'test_genesis.lmdb'),
            indexes=MerkleDatabase.create_index_configuration(),
            _size=10 * 1024 * 1024)
        merkle_db = MerkleDatabase(state_database)

        # The mocked context manager only needs to hand out a squash
        # handler and the initial (empty-tree) state root.
        ctx_mgr = Mock(name='ContextManager')
        ctx_mgr.get_squash_handler.return_value = Mock()
        ctx_mgr.get_first_root.return_value = merkle_db.get_merkle_root()

        txn_executor = Mock(name='txn_executor')
        completer = Mock('completer')
        completer.add_block = Mock('add_block')

        genesis_ctrl = GenesisController(
            context_manager=ctx_mgr,
            transaction_executor=txn_executor,
            completer=completer,
            block_store=block_store,
            state_view_factory=StateViewFactory(state_database),
            identity_signer=self._signer,
            block_manager=block_manager,
            data_dir=self._temp_dir,
            config_dir=self._temp_dir,
            chain_id_manager=ChainIdManager(self._temp_dir),
            batch_sender=Mock('batch_sender'))

        on_done_fn = Mock(return_value='')
        genesis_ctrl.start(on_done_fn)

        # The genesis batch file must be consumed ...
        self.assertEqual(False, os.path.exists(genesis_file))

        # ... and exactly one genesis block produced and completed.
        self.assertEqual(True, block_store.chain_head is not None)
        self.assertEqual(1, on_done_fn.call_count)
        self.assertEqual(1, completer.add_block.call_count)
        self.assertEqual(block_store.chain_head.identifier,
                         self._read_block_chain_id())
Ejemplo n.º 23
0
class TestSawtoothMerkleTrie:
    """Context-managed test harness around a MerkleDatabase backed by an
    LMDB file in a throwaway temporary directory.

    Keys passed to set/get/delete are hashed with _hash unless the caller
    indicates they are already hashes.
    """

    def __init__(self):
        # Use a unique temp directory instead of the hard-coded
        # '/tmp/sawtooth' so concurrent or repeated runs cannot collide
        # on stale database state.
        import tempfile
        self.dir = tempfile.mkdtemp()
        self.file = os.path.join(self.dir, 'merkle.lmdb')

        self.lmdb = lmdb_nolock_database.LMDBNoLockDatabase(self.file, 'n')

        self.trie = MerkleDatabase(self.lmdb)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.trie.close()
        # Remove the temporary directory created in __init__ so test
        # runs do not leak files.
        import shutil
        shutil.rmtree(self.dir, ignore_errors=True)

    # assertions
    def assert_value_at_address(self, address, value, ishash=False):
        """Assert that *address* resolves to *value* in the trie."""
        assert self.get(address, ishash) == value

    def assert_no_key(self, key):
        """Assert that *key* is absent (lookup raises KeyError)."""
        with pytest.raises(KeyError):
            self.get(key)

    def assert_root(self, expected):
        """Assert the current merkle root equals *expected*."""
        assert expected == self.get_merkle_root()

    def assert_not_root(self, *not_roots):
        """Assert the current merkle root differs from every argument."""
        root = self.get_merkle_root()
        for not_root in not_roots:
            assert root != not_root

    # trie accessors

    # For convenience, assume keys are not hashed
    # unless otherwise indicated.

    def set(self, key, val, ishash=False):
        key_ = key if ishash else _hash(key)
        return self.trie.set(key_, val)

    def get(self, key, ishash=False):
        key_ = key if ishash else _hash(key)
        return self.trie.get(key_)

    def delete(self, key, ishash=False):
        key_ = key if ishash else _hash(key)
        return self.trie.delete(key_)

    def set_merkle_root(self, root):
        self.trie.set_merkle_root(root)

    def get_merkle_root(self):
        return self.trie.get_merkle_root()

    def update(self, set_items, delete_items=None, virtual=True):
        return self.trie.update(set_items, delete_items, virtual=virtual)
Ejemplo n.º 24
0
        def _squash(state_root, context_ids, persist, clean_up):
            """Squash `context_ids` plus all of their base contexts
            (breadth-first) into one set of updates/deletes, then apply
            it to the merkle tree rooted at `state_root`.

            Returns the resulting state hash; `state_root` is returned
            unchanged if no context set or deleted any address.
            """
            contexts_in_chain = deque()
            contexts_in_chain.extend(context_ids)
            context_ids_already_searched = []
            context_ids_already_searched.extend(context_ids)

            # There is only one exit condition and that is when all the
            # contexts have been accessed once.
            updates = dict()
            deletes = set()
            while contexts_in_chain:
                current_c_id = contexts_in_chain.popleft()
                current_context = self._contexts[current_c_id]
                if not current_context.is_read_only():
                    current_context.make_read_only()

                addresses_w_values = current_context.get_all_if_set()
                for add, val in addresses_w_values.items():
                    # Since we are moving backwards through the graph of
                    # contexts, only update if the address hasn't been set
                    # or deleted
                    if add not in updates and add not in deletes:
                        updates[add] = val

                addresses_w_values = current_context.get_all_if_deleted()
                for add, _ in addresses_w_values.items():
                    # Since we are moving backwards through the graph of
                    # contexts, only add to deletes if the address hasn't been
                    # previously deleted or set in the graph
                    if add not in updates and add not in deletes:
                        deletes.add(add)

                # Enqueue each unvisited base context so the whole
                # ancestry is folded in exactly once.
                for c_id in current_context.base_contexts:
                    if c_id not in context_ids_already_searched:
                        contexts_in_chain.append(c_id)
                        context_ids_already_searched.append(c_id)

            tree = MerkleDatabase(self._database, state_root)

            # filter the delete list to just those items in the tree
            deletes = [addr for addr in deletes if addr in tree]

            if not updates and not deletes:
                state_hash = state_root
            else:
                # A virtual update computes the new hash without
                # persisting the nodes.
                virtual = not persist
                state_hash = tree.update(updates, deletes, virtual=virtual)

            if clean_up:
                self.delete_contexts(context_ids_already_searched)
            return state_hash
Ejemplo n.º 25
0
    def setUp(self):
        """Seed a fresh in-memory state tree with three settings entries
        and build the ConfigViewFactory under test."""
        database = DictDatabase()
        state_view_factory = StateViewFactory(database)
        self._config_view_factory = ConfigViewFactory(state_view_factory)

        initial_state = {
            TestConfigView._address('my.setting'):
                TestConfigView._setting_entry('my.setting', '10'),
            TestConfigView._address('my.setting.list'):
                TestConfigView._setting_entry('my.setting.list', '10,11,12'),
            TestConfigView._address('my.other.list'):
                TestConfigView._setting_entry('my.other.list', '13;14;15'),
        }
        self._current_root_hash = MerkleDatabase(database).update(
            initial_state, virtual=False)
Ejemplo n.º 26
0
 def run(self):
     """Worker loop: inflate queued address lists against their state root.

     Pulls (context_id, state_hash, addresses) items off the input queue
     forever and pushes (context_id, [(address, value-or-None)]) results
     onto the output queue.
     """
     while True:
         c_id, state_hash, address_list = self._addresses.get(block=True)
         tree = MerkleDatabase(self._database, state_hash)
         inflated = []
         for addr in address_list:
             try:
                 val = tree.get(addr)
             except KeyError:
                 # Address absent under this root; report None.
                 val = None
             inflated.append((addr, val))
         self._inflated_addresses.put((c_id, inflated))
Ejemplo n.º 27
0
 def run(self):
     """Worker loop: resolve address values for queued contexts.

     Each queue item is (context_id, state_hash, address_list); the
     result pushed back is (context_id, list of (address, value)) where
     a missing address maps to None.
     """
     def _lookup(tree, address):
         # Missing addresses are reported as None rather than raising.
         try:
             return tree.get(address)
         except KeyError:
             return None

     while True:
         c_id, state_hash, addresses = self._addresses.get(block=True)
         tree = MerkleDatabase(self._database, state_hash)
         return_values = [(a, _lookup(tree, a)) for a in addresses]
         self._inflated_addresses.put((c_id, return_values))
Ejemplo n.º 28
0
    def test_merkle_trie_root_advance(self):
        """Setting a key yields a new root; the value only becomes
        visible once the trie is advanced to that root."""
        value = {"name": "foo", "value": 1}
        key = MerkleDatabase.hash("foo")

        orig_root = self.trie.get_merkle_root()
        new_root = self.trie.set(key, value)

        # Still on the old root: the key must not be visible yet.
        with self.assertRaises(KeyError):
            self.trie.get(key)

        self.trie.set_merkle_root(new_root)
        self.assertEqual(value, self.trie.get(key))
Ejemplo n.º 29
0
    def setUp(self):
        """Seed an in-memory merkle tree with three settings entries and
        build the SettingsViewFactory under test."""
        database = DictDatabase()
        state_view_factory = StateViewFactory(database)
        self._settings_view_factory = SettingsViewFactory(state_view_factory)

        entries = {
            TestSettingsView._address('my.setting'):
                TestSettingsView._setting_entry('my.setting', '10'),
            TestSettingsView._address('my.setting.list'):
                TestSettingsView._setting_entry('my.setting.list', '10,11,12'),
            TestSettingsView._address('my.other.list'):
                TestSettingsView._setting_entry('my.other.list', '13;14;15'),
        }
        self._current_root_hash = MerkleDatabase(database).update(
            entries, virtual=False)
Ejemplo n.º 30
0
    def test_squash(self):
        """Tests that squashing a context based on state from other
        contexts will result in the same merkle hash as updating the
        merkle tree with the same data.

        Notes:
            Set up the context

            Test:
                1) Make set calls on several of the addresses.
                2) Squash the context to get a new state hash.
                3) Apply all of the aggregate sets from all
                of the contexts, to another database with a merkle tree.
                4) Assert that the state hashes are the same.
                5) Assert that the state deltas have been stored
        """
        # 1)
        context_id = self._setup_context()
        self.context_manager.set(
            context_id, [{'bbbb': b'2'}, {'eeee': b'4'}])

        # 2)
        squash = self.context_manager.get_squash_handler()
        squashed_hash = squash(
            self.first_state_hash, [context_id], persist=True)

        # 3)
        expected_state = {
            'aaaa': b'25',
            'bbbb': b'2',
            'cccc': b'27',
            'dddd': b'28',
            'eeee': b'4',
        }
        reference_tree = MerkleDatabase(self.database_results)
        reference_hash = reference_tree.update(
            expected_state, virtual=False)

        # 4)
        self.assertEqual(squashed_hash, reference_hash)
        deltas = self.state_delta_store.get_state_deltas(squashed_hash)

        # 5)
        for addr, value in expected_state.items():
            expected_change = StateChange(address=addr,
                                          value=value,
                                          type=StateChange.SET)
            self.assertIn(expected_change, deltas)
Ejemplo n.º 31
0
 def _squash(state_root, context_ids):
     """Merge the writes of every context in *context_ids* into the
     merkle tree rooted at *state_root* and persist the result.

     Returns the new state hash. Raises SquashException if the same
     address was written by more than one context.
     """
     tree = MerkleDatabase(self._database, state_root)
     updates = dict()
     for c_id in context_ids:
         # _contexts is shared across threads; take the reference out
         # under the lock, then work on it outside the lock.
         with self._shared_lock:
             context = self._contexts[c_id]
         # Reject any address already collected from an earlier context.
         for add in context.get_address_value_dict().keys():
             if add in updates:
                 raise SquashException(
                     "Duplicate address {} in context {}".format(
                         add, c_id))
         # Values are futures; resolve each one as it is merged in.
         updates.update({k: v.result() for k, v in
                         context.get_address_value_dict().items()})
     state_hash = tree.update(updates, virtual=False)
     return state_hash
Ejemplo n.º 32
0
        def _recompute_state_hash(state_root, context=None):
            """For DAG only: recompute the state hash by virtually
            applying the context's updates and deletes on top of
            `state_root`.

            Returns the recomputed hash, or None when the update fails
            (best-effort: failures are logged, not raised).
            """
            state_hash = None
            try:
                tree = MerkleDatabase(self._database, state_root)
                state_hash = tree.update(context['updates'],
                                         context['deletes'],
                                         virtual=True)
                LOGGER.debug('_recompute_state_hash: STATE=%s->%s\n',
                             state_root[:8], state_hash[:8])
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; Exception keeps the best-effort behavior
            # without trapping interpreter-exit signals.
            except Exception:  # pylint: disable=broad-except
                LOGGER.debug('_recompute_state_hash: BAD STATE=%s\n',
                             state_root[:8])

            return state_hash
Ejemplo n.º 33
0
    def commit_context(self, context_id_list, virtual):
        """
        Part of the interface to the Executor.

        Args:
            context_id_list (list of str): ids of the contexts to merge;
                every context must share the same merkle root.
            virtual (bool): if True, compute the new root without
                persisting the update.

        Returns:
            state_hash (str): the new state hash after the context_id_list
                              has been committed

        Raises:
            CommitException: on an unknown context id, mismatched merkle
                roots, or an address written by more than one context.
        """
        if any(c_id not in self._contexts for c_id in context_id_list):
            raise CommitException("Context Id not in contexts")
        first_id = context_id_list[0]

        if not all(self._contexts[first_id].merkle_root
                   == self._contexts[c_id].merkle_root
                   for c_id in context_id_list):
            raise CommitException(
                "MerkleRoots not all equal, yet asking to merge")

        merkle_root = self._contexts[first_id].merkle_root
        tree = MerkleDatabase(self._database, merkle_root)

        merged_updates = {}
        for c_id in context_id_list:
            # Pop each context out of the shared map under the lock;
            # reading its values happens outside the lock.
            with self._shared_lock:
                context = self._contexts[c_id]
                del self._contexts[c_id]
            for k in context.get_writable_address_value_dict().keys():
                if k in merged_updates:
                    raise CommitException(
                        "Duplicate address {} in context {}".format(k, c_id))
            merged_updates.update(context.get_writable_address_value_dict())

        # Resolve the value futures, dropping addresses whose resolved
        # value is None. (A dead `new_root = merkle_root` assignment was
        # removed; the root comes solely from tree.update.)
        add_value_dict = {}
        for k, val_fut in merged_updates.items():
            value = val_fut.result()
            if value is not None:
                add_value_dict[k] = value

        return tree.update(set_items=add_value_dict, virtual=virtual)
Ejemplo n.º 34
0
class StateGetRequest(Handler):
    """Handles client requests for a single leaf value in global state."""

    def __init__(self, database, block_store):
        self._tree = MerkleDatabase(database)
        self._block_store = block_store

    def handle(self, identity, message_content):
        """Resolve the requested address at the requested root and build
        the appropriate response via the client helper."""
        helper = _ClientHelper(
            message_content,
            client_pb2.ClientStateGetRequest,
            client_pb2.ClientStateGetResponse,
            validator_pb2.Message.CLIENT_STATE_GET_RESPONSE,
            tree=self._tree,
            block_store=self._block_store)

        helper.set_root()
        if helper.has_response():
            return helper.result

        # Fetch leaf value
        address = helper.request.address
        try:
            value = self._tree.get(address)
        except KeyError:
            LOGGER.debug('Unable to find entry at address %s', address)
            helper.set_response(helper.status.NO_RESOURCE)
        except ValueError as e:
            LOGGER.debug('Address %s is a nonleaf', address)
            LOGGER.debug(e)
            helper.set_response(helper.status.MISSING_ADDRESS)
        else:
            # No error set a response, so report the value.
            helper.set_response(helper.status.OK,
                                head_id=helper.head_id,
                                value=value)

        return helper.result
Ejemplo n.º 35
0
class StateListRequest(Handler):
    """Handles client requests for all leaves under an address prefix."""

    def __init__(self, database, block_store):
        self._block_store = block_store
        self._tree = MerkleDatabase(database)

    def handle(self, identity, message_content):
        """List every leaf under the request's address prefix at the
        requested root."""
        helper = _ClientHelper(
            message_content,
            client_pb2.ClientStateListRequest,
            client_pb2.ClientStateListResponse,
            validator_pb2.Message.CLIENT_STATE_LIST_RESPONSE,
            tree=self._tree,
            block_store=self._block_store)

        helper.set_root()
        if helper.has_response():
            return helper.result

        # Fetch leaves and encode as protobuf
        prefix = helper.request.address or ''
        leaves = [
            client_pb2.Leaf(address=address, data=data)
            for address, data in self._tree.leaves(prefix).items()
        ]

        if not leaves:
            helper.set_response(helper.status.NO_RESOURCE,
                                head_id=helper.head_id)
        else:
            helper.set_response(helper.status.OK,
                                head_id=helper.head_id,
                                leaves=leaves)

        return helper.result
Ejemplo n.º 36
0
    def test_merkle_trie_delete(self):
        """A stored key stops resolving once the root is advanced past
        its deletion."""
        key = MerkleDatabase.hash("bar")
        value = {"name": "bar", "value": 1}

        self.trie.set_merkle_root(self.trie.set(key, value))
        self.assertEqual(value, self.trie.get(key))

        self.trie.set_merkle_root(self.trie.delete(key))
        with self.assertRaises(KeyError):
            self.trie.get(key)
Ejemplo n.º 37
0
 def __init__(self, database, block_store):
     """Configure the base handler with a merkle tree over *database*
     and the given block store."""
     state_tree = MerkleDatabase(database)
     super().__init__(
         client_pb2.ClientStateGetRequest,
         client_pb2.ClientStateGetResponse,
         validator_pb2.Message.CLIENT_STATE_GET_RESPONSE,
         tree=state_tree,
         block_store=block_store)
Ejemplo n.º 38
0
def get_databases(bind_network, data_dir, database=None):
    """Open and return (global_state_db, blockstore) for a network binding.

    When *database* (an OrientDB uri) is given, the block store is backed
    by OrientDB; otherwise an indexed LMDB file under *data_dir* is used.
    """
    # Get the global state database to operate on
    state_db_path = os.path.join(
        data_dir, 'merkle-{}.lmdb'.format(bind_network[-2:]))
    LOGGER.debug('verifying state in %s', state_db_path)
    state_db = NativeLmdbDatabase(
        state_db_path,
        indexes=MerkleDatabase.create_index_configuration())

    if database:
        LOGGER.debug('get_databases: OPEN ORIENTDB uri=%s', database)
        raw_block_db = OrientDatabase(
            database,
            BlockStore.serialize_block,
            BlockStore.deserialize_block,
            indexes=BlockStore.create_index_configuration(),
            flag='c')
        LOGGER.debug('get_databases:OPEN ORIENT DB DONE %s', raw_block_db)
    else:
        # Get the blockstore
        block_db_path = os.path.join(
            data_dir, 'block-{}.lmdb'.format(bind_network[-2:]))
        LOGGER.debug('block store file is %s', block_db_path)
        raw_block_db = IndexedDatabase(
            block_db_path,
            BlockStore.serialize_block,
            BlockStore.deserialize_block,
            flag='c',
            indexes=BlockStore.create_index_configuration())

    return state_db, BlockStore(raw_block_db)
Ejemplo n.º 39
0
    def create_view(self, state_root_hash=None):
        """Creates a StateView for the given state root hash.

        Args:
            state_root_hash (str): The state root hash of the state view
                to return.  If None, the view is built on the merkle
                database's current root.

        Returns:
            StateView: state view locked to the given root hash.
        """
        merkle_db = MerkleDatabase(self._database)
        # Default construction leaves the database at its current root;
        # only move it when an explicit root was requested.
        if state_root_hash is not None:
            merkle_db.set_merkle_root(state_root_hash)
        return StateView(merkle_db)
Ejemplo n.º 40
0
    def create_view(self, state_root_hash=None):
        """Create a StateView pinned to *state_root_hash*.

        Args:
            state_root_hash (str): Root hash the view should be locked
                to; when None the merkle database's current root is used.

        Returns:
            StateView: state view locked to the given root hash.
        """
        db = MerkleDatabase(self._database)
        if state_root_hash is not None:
            db.set_merkle_root(state_root_hash)
        return StateView(db)
Ejemplo n.º 41
0
    def test_state_verifier(self):
        """verify_state recomputes state roots matching the
        precalculated roots stored with the blocks."""
        expected_roots = [
            "e35490eac6f77453675c3399da7efe451e791272bbc8cf1b032c75030fb455c3",
            "3a369eb951171895c00ba2ffd04bfa1ef98d6ee651f96a65ae3280cf8d67d5e7",
            "797e70e29915c9129f950b2084ed0e3c09246bd1e6c232571456f51ca85df340",
        ]

        blockstore = BlockStore(DictDatabase(
            indexes=BlockStore.create_index_configuration()))
        global_state_db = NativeLmdbDatabase(
            os.path.join(self._temp_dir, 'test_state_verifier.lmdb'),
            indexes=MerkleDatabase.create_index_configuration())

        populate_blockstore(blockstore, get_signer(), expected_roots)

        verify_state(
            global_state_db,
            blockstore,
            "tcp://eth0:4004",
            "serial")

        # There is a bug in the shutdown code for some component this depends
        # on, which causes it to occassionally hang during shutdown. Just kill
        # the process for now.
        # pylint: disable=protected-access
        os._exit(0)
Ejemplo n.º 42
0
    def test_state_verifier(self):
        """verify_state over LMDB-backed block and state databases
        reproduces the precalculated state roots."""
        block_db = NativeLmdbDatabase(
            os.path.join(self._temp_dir,
                         'test_state_verifier_block_store_db.lmdb'),
            indexes=BlockStore.create_index_configuration())
        block_store = BlockStore(block_db)

        state_db = NativeLmdbDatabase(
            os.path.join(self._temp_dir,
                         'test_state_verifier_global_state.lmdb'),
            indexes=MerkleDatabase.create_index_configuration())

        expected_roots = [
            "e35490eac6f77453675c3399da7efe451e791272bbc8cf1b032c75030fb455c3",
            "3a369eb951171895c00ba2ffd04bfa1ef98d6ee651f96a65ae3280cf8d67d5e7",
            "797e70e29915c9129f950b2084ed0e3c09246bd1e6c232571456f51ca85df340",
        ]

        signer = get_signer()
        populate_blockstore(block_store, signer, expected_roots)

        verify_state(state_db, block_store, "tcp://eth0:4004", "serial")

        # There is a bug in the shutdown code for some component this depends
        # on, which causes it to occassionally hang during shutdown. Just kill
        # the process for now.
        # pylint: disable=protected-access
        os._exit(0)
Ejemplo n.º 43
0
    def create_view(self, state_root_hash):
        """Creates a StateView for the given state root hash.

        Returns:
            StateView: state view locked to the given root hash.
        """
        merkle_db = MerkleDatabase(self._database, state_root_hash)
        return StateView(merkle_db)
Ejemplo n.º 44
0
    def setUp(self):
        """Build an LMDB-backed state tree seeded with three settings
        entries and the SettingsViewFactory under test."""
        self._temp_dir = tempfile.mkdtemp()
        db = NativeLmdbDatabase(
            os.path.join(self._temp_dir, 'test_config_view.lmdb'),
            indexes=MerkleDatabase.create_index_configuration(),
            _size=10 * 1024 * 1024)
        self._settings_view_factory = SettingsViewFactory(
            StateViewFactory(db))

        entries = {
            TestSettingsView._address('my.setting'):
                TestSettingsView._setting_entry('my.setting', '10'),
            TestSettingsView._address('my.setting.list'):
                TestSettingsView._setting_entry('my.setting.list', '10,11,12'),
            TestSettingsView._address('my.other.list'):
                TestSettingsView._setting_entry('my.other.list', '13;14;15'),
        }
        self._current_root_hash = MerkleDatabase(db).update(
            entries, virtual=False)
    def setUp(self):
        """Create a ContextManager over a fresh LMDB state database."""
        self._temp_dir = tempfile.mkdtemp()

        state_db = NativeLmdbDatabase(
            os.path.join(self._temp_dir, 'test_state_view.lmdb'),
            indexes=MerkleDatabase.create_index_configuration(),
            _size=10 * 1024 * 1024)
        self._context_manager = ContextManager(state_db)
Ejemplo n.º 46
0
    def setUp(self):
        """Open a no-lock LMDB file in a temp dir and wrap it in a
        merkle trie."""
        self.dir = tempfile.mkdtemp()
        self.file = os.path.join(self.dir, 'merkle.lmdb')
        self.lmdb = lmdb_nolock_database.LMDBNoLockDatabase(self.file, 'n')
        self.trie = MerkleDatabase(self.lmdb)
Ejemplo n.º 47
0
        def _squash(state_root, context_ids, persist, clean_up):
            """Fold `context_ids` and all of their base contexts into a
            single merkle-tree update rooted at `state_root`.

            Returns the resulting state hash, or `state_root` unchanged
            when no context set any address. When `persist` is True the
            update is written through and its deltas saved; when
            `clean_up` is True every visited context is deleted.
            """
            contexts_in_chain = deque()
            contexts_in_chain.extend(context_ids)
            context_ids_already_searched = []
            context_ids_already_searched.extend(context_ids)

            # There is only one exit condition and that is when all the
            # contexts have been accessed once.
            updates = dict()
            while contexts_in_chain:
                current_c_id = contexts_in_chain.popleft()
                current_context = self._contexts[current_c_id]
                if not current_context.is_read_only():
                    current_context.make_read_only()

                addresses_w_values = current_context.get_all_if_set()
                for add, val in addresses_w_values.items():
                    # Since we are moving backwards through the graph of
                    # contexts, only update if the address hasn't been set
                    if add not in updates:
                        updates[add] = val

                for c_id in current_context.base_contexts:
                    if c_id not in context_ids_already_searched:
                        contexts_in_chain.append(c_id)
                        context_ids_already_searched.append(c_id)

            if not updates:
                # Nothing changed, so keep the original root — but still
                # honor clean_up below. The previous early `return
                # state_root` skipped clean_up, leaking every searched
                # context (compare the delete-aware variant of _squash).
                state_hash = state_root
            else:
                tree = MerkleDatabase(self._database, state_root)
                virtual = not persist
                state_hash = tree.update(updates, virtual=virtual)
                if persist:
                    # save the state changes to the state_delta_store
                    changes = [StateChange(address=addr,
                                           value=value,
                                           type=StateChange.SET)
                               for addr, value in updates.items()]
                    self._state_delta_store.save_state_deltas(
                        state_hash, changes)

            if clean_up:
                self.delete_contexts(context_ids_already_searched)
            return state_hash
Ejemplo n.º 48
0
    def commit_context(self, context_id_list, virtual):
        """
        Part of the interface to the Executor.

        Args:
            context_id_list (list of str): ids of the contexts to merge;
                every context must share the same merkle root.
            virtual (bool): if True, compute the new root without
                persisting the update.

        Returns:
            state_hash (str): the new state hash after the context_id_list
                              has been committed

        Raises:
            CommitException: on an unknown context id, mismatched merkle
                roots, or an address written by more than one context.
        """
        if any(c_id not in self._contexts for c_id in context_id_list):
            raise CommitException("Context Id not in contexts")
        first_id = context_id_list[0]

        if not all(self._contexts[first_id].merkle_root ==
                   self._contexts[c_id].merkle_root
                   for c_id in context_id_list):
            raise CommitException(
                "MerkleRoots not all equal, yet asking to merge")

        merkle_root = self._contexts[first_id].merkle_root
        tree = MerkleDatabase(self._database, merkle_root)

        merged_updates = {}
        for c_id in context_id_list:
            # Pop each context out of the shared map under the lock;
            # reading its values happens outside the lock.
            with self._shared_lock:
                context = self._contexts[c_id]
                del self._contexts[c_id]
            for k in context.get_writable_address_value_dict().keys():
                if k in merged_updates:
                    raise CommitException(
                        "Duplicate address {} in context {}".format(k, c_id))
            merged_updates.update(context.get_writable_address_value_dict())

        # Resolve the value futures and apply them in one update. (A dead
        # `new_root = merkle_root` assignment was removed; the returned
        # root comes solely from tree.update.)
        add_value_dict = {address: value.result()
                          for address, value in merged_updates.items()}
        return tree.update(set_items=add_value_dict, virtual=virtual)
Ejemplo n.º 49
0
def make_db_and_store(base_dir, size=3):
    """
    Creates and returns three related objects for testing:
        * database - LMDB state database with evolving state
        * store - blocks with root hashes corresponding to that state
        * roots - list of root hashes used in order
    With defaults (size=3), the values at the three roots look like this:
        * 0 - {'000...1': b'1'}
        * 1 - {'000...1': b'2', '000...2': b'4'}
        * 2 - {'000...1': b'3', '000...2': b'5', '000...3': b'7'}
    """
    database = NativeLmdbDatabase(
        os.path.join(base_dir, 'client_handlers_mock_db.lmdb'),
        indexes=MerkleDatabase.create_index_configuration(),
        _size=10 * 1024 * 1024)
    store = MockBlockStore(size=0)
    roots = []

    merkle = MerkleDatabase(database)

    # Create all the keys that will be used. Keys are zero-padded hex strings
    # starting with '1'.
    keys = [format(i, 'x').zfill(70) for i in range(1, size + 1)]

    for i in range(1, size + 1):
        # Construct the state for this root
        data = {}
        for key_idx in range(i):
            key = keys[key_idx]
            # Calculate unique values based on the key and root
            val = i + (2 * key_idx)
            data[key] = str(val).encode()

        root = merkle.update(data, virtual=False)
        roots.append(root)
        store.add_block(str(i), root)

    return database, store, roots
Ejemplo n.º 50
0
class StateGetRequestHandler(Handler):
    """Serves ClientStateGetRequest: looks up one address's value in the
    merkle tree at a client-supplied merkle root."""

    def __init__(self, database):
        # One shared tree; each request repoints it via set_merkle_root.
        self._tree = MerkleDatabase(database)

    def handle(self, identity, message_content):
        """Parse the request, move the tree to the requested root, and
        respond with the value at the requested address or an error
        status (NORESOURCE, NONLEAF, or ERROR on a bad protobuf)."""
        request = client_pb2.ClientStateGetRequest()
        resp_proto = client_pb2.ClientStateGetResponse
        status = resp_proto.OK

        try:
            request.ParseFromString(message_content)
            self._tree.set_merkle_root(request.merkle_root)
        except KeyError as e:
            # The requested merkle root is unknown to this database.
            status = resp_proto.NORESOURCE
            LOGGER.debug(e)
        except DecodeError:
            status = resp_proto.ERROR
            LOGGER.info("Expected protobuf of class %s failed to "
                        "deserialize", request)

        if status != resp_proto.OK:
            response = resp_proto(status=status)
        else:
            address = request.address
            try:
                value = self._tree.get(address)
            except KeyError:
                # Address not present under this root.
                status = resp_proto.NORESOURCE
                LOGGER.debug("No entry at state address %s", address)
            except ValueError:
                # Address resolves to an interior node, not a leaf.
                status = resp_proto.NONLEAF
                LOGGER.debug("Node at state address %s is a nonleaf", address)

            response = resp_proto(status=status)
            if status == resp_proto.OK:
                response.value = value

        return HandlerResult(
            status=HandlerStatus.RETURN,
            message_out=response,
            message_type=validator_pb2.Message.CLIENT_STATE_GET_RESPONSE)
Ejemplo n.º 51
0
    def test_state_view(self):
        """Tests the StateViewFactory and its creation of StateViews

        This test exercises the following:

        1. Create an empty merkle database.
        2. Create a view into the database, asserting its emptiness.
        3. Update the database with a value, creating a new root.
        4. Create a view into the database with the new root.
        5. Verify the view does not match the previous view and contains
           the new item.
        """
        merkle_db = MerkleDatabase(self.database)
        factory = StateViewFactory(self.database)

        empty_view = factory.create_view(merkle_db.get_merkle_root())

        # A view on the empty root exposes no addresses, leaves, or values.
        self.assertEqual([], empty_view.addresses())
        self.assertEqual({}, dict(empty_view.leaves('')))
        with self.assertRaises(KeyError):
            empty_view.get('abcd')

        next_root = merkle_db.update({'abcd': 'hello'.encode()},
                                     virtual=False)
        updated_view = factory.create_view(next_root)

        # The original view is pinned to its root and unaffected.
        self.assertEqual([], empty_view.addresses())
        self.assertEqual(['abcd'], updated_view.addresses())

        # The new view reads back the stored value.
        self.assertEqual('hello', updated_view.get('abcd').decode())
        self.assertEqual({'abcd': 'hello'.encode()},
                         dict(updated_view.leaves('')))
Ejemplo n.º 52
0
class StateListRequestHandler(Handler):
    """Handles client requests to list all state entries under a prefix."""

    def __init__(self, database):
        # A single merkle tree instance is reused across requests; the
        # root is re-pointed per request via set_merkle_root.
        self._tree = MerkleDatabase(database)

    def handle(self, identity, message_content):
        request = client_pb2.ClientStateListRequest()
        resp_proto = client_pb2.ClientStateListResponse
        status = resp_proto.OK

        try:
            request.ParseFromString(message_content)
            self._tree.set_merkle_root(request.merkle_root)
        except KeyError as e:
            # The requested merkle root is not in the database.
            status = resp_proto.NORESOURCE
            LOGGER.debug(e)
        except DecodeError:
            status = resp_proto.ERROR
            LOGGER.info("Expected protobuf of class %s failed to "
                        "deserialize", request)

        if status == resp_proto.OK:
            leaves = self._tree.leaves(request.prefix)
            if leaves:
                entries = [Entry(address=addr, data=data)
                           for addr, data in leaves.items()]
                response = resp_proto(status=status, entries=entries)
            else:
                # No data under the prefix at this root.
                status = resp_proto.NORESOURCE
                response = resp_proto(status=status)
        else:
            response = resp_proto(status=status)

        return HandlerResult(
            status=HandlerStatus.RETURN,
            message_out=response,
            message_type=validator_pb2.Message.CLIENT_STATE_LIST_RESPONSE)
Ejemplo n.º 53
0
    def test_merkle_trie_update(self):
        """Sets 1000 random keys one at a time, then overwrites 50 of them
        in a single batched update and verifies the batched values are
        readable from the resulting root.
        """
        value = ''.join(random.choice(string.ascii_lowercase)
                        for _ in range(512))
        keys = []
        # FIX: loop index was named `i` but unused; the per-key digest was
        # bound to `hash`, shadowing the builtin.
        for _ in range(1000):
            key = ''.join(random.choice(string.ascii_lowercase)
                          for _ in range(10))
            keys.append(key)
            key_hash = MerkleDatabase.hash(key)
            new_root = self.trie.set(key_hash, {key: value})
            self.trie.set_merkle_root(new_root)

        # Overwrite a random sample of keys via a single batched update.
        set_items = {}
        for key in random.sample(keys, 50):
            key_hash = MerkleDatabase.hash(key)
            set_items[key_hash] = {key: 5.0}

        update_root = self.trie.update(set_items)
        self.trie.set_merkle_root(update_root)

        # Every batched value must be readable back at the new root.
        for address in set_items:
            self.assertEqual(self.trie.get(address),
                             set_items[address])
Ejemplo n.º 54
0
    def __init__(self, with_genesis=True):
        """Build a publisher test fixture with block and state stores.

        Args:
            with_genesis (bool): when True (default), generate and commit
                a genesis block so the publisher starts with a chain head;
                when False, the publisher is created with no chain head.
        """
        self.block_sender = MockBlockSender()
        self.batch_sender = MockBatchSender()
        self.dir = tempfile.mkdtemp()
        self.block_db = NativeLmdbDatabase(
            os.path.join(self.dir, 'block.lmdb'),
            BlockStore.create_index_configuration())
        self.block_store = BlockStore(self.block_db)
        self.block_cache = BlockCache(self.block_store)
        self.state_db = NativeLmdbDatabase(
            os.path.join(self.dir, "merkle.lmdb"),
            MerkleDatabase.create_index_configuration())

        self.state_view_factory = NativeStateViewFactory(self.state_db)

        self.block_manager = BlockManager()
        self.block_manager.add_commit_store(self.block_store)

        context = create_context('secp256k1')
        private_key = context.new_random_private_key()
        crypto_factory = CryptoFactory(context)
        self.signer = crypto_factory.new_signer(private_key)

        identity_private_key = context.new_random_private_key()
        self.identity_signer = crypto_factory.new_signer(identity_private_key)
        chain_head = None
        if with_genesis:
            self.genesis_block = self.generate_genesis_block()
            chain_head = self.genesis_block
            self.block_manager.put([chain_head.block])
            self.block_manager.persist(
                chain_head.block.header_signature,
                "commit_store")

        self.block_publisher = BlockPublisher(
            block_manager=self.block_manager,
            transaction_executor=MockTransactionExecutor(),
            transaction_committed=self.block_store.has_transaction,
            batch_committed=self.block_store.has_batch,
            state_view_factory=self.state_view_factory,
            block_sender=self.block_sender,
            # BUG FIX: the block sender was passed here, leaving the
            # MockBatchSender created above unused; pass the batch sender.
            batch_sender=self.batch_sender,
            # BUG FIX: with_genesis=False leaves chain_head as None, which
            # has no .block attribute; guard the dereference.
            chain_head=chain_head.block if chain_head is not None else None,
            identity_signer=self.identity_signer,
            data_dir=None,
            config_dir=None,
            permission_verifier=MockPermissionVerifier(),
            batch_observers=[])
Ejemplo n.º 55
0
def get_databases(bind_network, data_dir):
    """Open the global state database and the block store for a network.

    Args:
        bind_network (str): network bind endpoint; its last two characters
            are used to distinguish the per-network lmdb file names.
        data_dir (str): directory that contains the lmdb files.

    Returns:
        tuple: (global_state_db, blockstore)
    """
    suffix = bind_network[-2:]

    # Global state (merkle) database
    state_db_path = os.path.join(data_dir, 'merkle-{}.lmdb'.format(suffix))
    LOGGER.debug(
        'verifying state in %s', state_db_path)
    global_state_db = NativeLmdbDatabase(
        state_db_path,
        indexes=MerkleDatabase.create_index_configuration())

    # Block store
    block_db_path = os.path.join(data_dir, 'block-{}.lmdb'.format(suffix))
    LOGGER.debug('block store file is %s', block_db_path)
    blockstore = BlockStore(NativeLmdbDatabase(
        block_db_path,
        indexes=BlockStore.create_index_configuration()))

    return global_state_db, blockstore
    def compute_state_hashes_wo_scheduler(self, base_dir):
        """Creates a state hash from the state updates from each txn in a
        valid batch.

        Args:
            base_dir (str): directory in which to create the scratch lmdb
                database used to compute the hashes.

        Returns:
            state_hashes (list of str): The merkle roots from state
                changes in 1 or more blocks in the yaml file.
        """

        database = NativeLmdbDatabase(
            os.path.join(base_dir, 'compute_state_hashes_wo_scheduler.lmdb'),
            indexes=MerkleDatabase.create_index_configuration(),
            _size=10 * 1024 * 1024)

        tree = MerkleDatabase(database=database)
        state_hashes = []
        updates = {}
        for batch in self._batches:
            result = self._batch_results[batch.header_signature]
            if result.is_valid:
                for txn in batch.transactions:
                    _, address_values, deletes = \
                        self._txn_execution[txn.header_signature]

                    # Each pair is an {address: value} dict. Since this is
                    # entirely serial, any overwrite of an address is
                    # expected and desirable, so merge the pairs directly
                    # (the previous dict-comprehension copy was redundant).
                    for pair in address_values:
                        updates.update(pair)

                    for address in deletes:
                        # pop with a default removes a pending write
                        # without raising for never-written addresses.
                        updates.pop(address, None)

            # This handles yaml files that have state roots in them
            if result.state_hash is not None:
                s_h = tree.update(set_items=updates, virtual=False)
                tree.set_merkle_root(merkle_root=s_h)
                state_hashes.append(s_h)
        if not state_hashes:
            state_hashes.append(tree.update(set_items=updates))
        return state_hashes
Ejemplo n.º 57
0
    def test_complex_basecontext_squash(self):
        """Tests complex context basing and squashing.
                                            i=qq,dd dd=0
                                            o=dd,pp pp=1
                                i=cc,aa  +->context_3_2a_1+|
                                o=dd,ll  |                 |
               i=aa,ab      +->context_2a|  i=aa    aa=0   |
               o=cc,ab      |   dd=10    |  o=aa,ll ll=1   |
        sh0->context_1-->sh1|   ll=11    +->context_3_2a_2+|->sh1
               cc=0         |   i=cc,aa  +->context_3_2b_1+|
               ab=1         |   o=nn,mm  |  i=nn,ba mm=0   |
                            +->context_2b|  o=mm,ba ba=1   |
                                nn=0     |                 |
                                mm=1     +->context_3_2b_2+|
                                            i=nn,oo ab=0
                                            o=ab,oo oo=1

        Notes:
            Test:
                1. Create a context off of the first state hash, set
                   addresses in it, and squash that context, getting a new
                   merkle root.
                2. Create 2 contexts with the context in # 1 as the base, and
                   for each of these contexts set addresses to values where the
                   outputs for each are disjoint.
                3. For each of these 2 contexts create 2 more contexts each
                   having one of the contexts in # 2 as the base context, and
                   set addresses to values.
                4. Squash the 4 contexts from #3 and assert the state hash
                   is equal to a manually computed state hash.
        """

        squash = self.context_manager.get_squash_handler()
        # 1)
        inputs_1 = [self._create_address('aa'),
                    self._create_address('ab')]
        outputs_1 = [self._create_address('cc'),
                     self._create_address('ab')]
        context_1 = self.context_manager.create_context(
            state_hash=self.first_state_hash,
            base_contexts=[],
            inputs=inputs_1,
            outputs=outputs_1)
        self.context_manager.set(
            context_id=context_1,
            address_value_list=[{a: v} for a, v in zip(
                outputs_1, [bytes(i) for i in range(len(outputs_1))])])

        sh1 = squash(
            state_root=self.first_state_hash,
            context_ids=[context_1],
            persist=True,
            clean_up=True)

        # 2)
        inputs_2a = [self._create_address('cc'),
                     self._create_address('aa')]
        outputs_2a = [self._create_address('dd'),
                      self._create_address('ll')]
        context_2a = self.context_manager.create_context(
            state_hash=self.first_state_hash,
            base_contexts=[],
            inputs=inputs_2a,
            outputs=outputs_2a)

        inputs_2b = [self._create_address('cc'),
                     self._create_address('aa')]
        outputs_2b = [self._create_address('nn'),
                      self._create_address('mm')]
        context_2b = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[],
            inputs=inputs_2b,
            outputs=outputs_2b)

        self.context_manager.set(
            context_id=context_2a,
            address_value_list=[{a: bytes(v)}
                                for a, v in zip(outputs_2a,
                                                range(10,
                                                      10 + len(outputs_2a)))]
        )
        self.context_manager.set(
            context_id=context_2b,
            address_value_list=[{a: bytes(v)}
                                for a, v in zip(outputs_2b,
                                                range(len(outputs_2b)))]
        )

        # 3)
        inputs_3_2a_1 = [self._create_address('qq'),
                         self._create_address('dd')]
        outputs_3_2a_1 = [self._create_address('dd'),
                          self._create_address('pp')]
        context_3_2a_1 = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[context_2a],
            inputs=inputs_3_2a_1,
            outputs=outputs_3_2a_1
        )
        inputs_3_2a_2 = [self._create_address('aa')]
        outputs_3_2a_2 = [self._create_address('aa'),
                          self._create_address('ll')]
        context_3_2a_2 = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[context_2a],
            inputs=inputs_3_2a_2,
            outputs=outputs_3_2a_2)

        inputs_3_2b_1 = [self._create_address('nn'),
                         self._create_address('ab')]
        outputs_3_2b_1 = [self._create_address('mm'),
                          self._create_address('ba')]
        context_3_2b_1 = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[context_2b],
            inputs=inputs_3_2b_1,
            outputs=outputs_3_2b_1)

        inputs_3_2b_2 = [self._create_address('nn'),
                         self._create_address('oo')]
        outputs_3_2b_2 = [self._create_address('ab'),
                          self._create_address('oo')]
        context_3_2b_2 = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[context_2b],
            inputs=inputs_3_2b_2,
            outputs=outputs_3_2b_2)

        self.context_manager.set(
            context_id=context_3_2a_1,
            address_value_list=[{a: bytes(v)}
                                for a, v in zip(outputs_3_2a_1,
                                                range(len(outputs_3_2a_1)))])
        self.context_manager.set(
            context_id=context_3_2a_2,
            address_value_list=[{a: bytes(v)}
                                for a, v in zip(outputs_3_2a_2,
                                                range(len(outputs_3_2a_2)))])
        self.context_manager.set(
            context_id=context_3_2b_1,
            address_value_list=[{a: bytes(v)}
                                for a, v in zip(outputs_3_2b_1,
                                                range(len(outputs_3_2b_1)))])
        self.context_manager.set(
            context_id=context_3_2b_2,
            address_value_list=[{a: bytes(v)}
                                for a, v in zip(outputs_3_2b_2,
                                                range(len(outputs_3_2b_2)))])

        # 4)
        sh2 = squash(
            state_root=sh1,
            context_ids=[context_3_2a_1, context_3_2a_2,
                         context_3_2b_1, context_3_2b_2],
            persist=False,
            clean_up=True)

        tree = MerkleDatabase(self.database_results)
        state_hash_from_1 = tree.update(
            set_items={a: v for a, v in zip(
                outputs_1, [bytes(i) for i in range(len(outputs_1))])},
            virtual=False)
        # FIX: assertEquals is a deprecated alias removed in Python 3.12;
        # use assertEqual.
        self.assertEqual(state_hash_from_1, sh1,
                         "The manually calculated state hash from the first "
                         "context and the one calculated by squashing that "
                         "state hash should be the same")
        tree.set_merkle_root(state_hash_from_1)
        test_sh2 = tree.update(set_items={self._create_address('aa'): bytes(0),
                                          self._create_address('ab'): bytes(0),
                                          self._create_address('ba'): bytes(1),
                                          self._create_address('dd'): bytes(0),
                                          self._create_address('ll'): bytes(1),
                                          self._create_address('mm'): bytes(0),
                                          self._create_address('oo'): bytes(1),
                                          self._create_address('pp'): bytes(1),
                                          self._create_address('nn'): bytes(0),
                                          self._create_address('cc'): bytes(0)})

        self.assertEqual(sh2, test_sh2, "Manually calculated and context "
                                        "manager calculated merkle hashes "
                                        "are the same")
Ejemplo n.º 58
0
    def test_state_root_after_parallel_ctx(self):
        """Tests that the correct state root is calculated after basing one
        context off of multiple contexts.

                              i=abcd
                              o=aaaa
                           +>context_1+
                           |  aaaa=1  |
                           |          |
               i=llll      |   i=bacd |      i=bbbb,aaaa
               o=llll      |   o=bbbb |      o=cccc,llll
        sh0--->ctx_0-->sh1>|-->context_2-+---->context_n---->sh2
               llll=5      |   bbbb=2 |      cccc=4
                           |          |      llll=8
                           |   i=abcd |
                           |   o=cccc |
                           +>context_3+
                               cccc=3

        Notes:
            Test:
                1. Create a context, set a value in it and squash it into a new
                   state hash.
                2. Create 3 contexts based off of the state root from #1.
                3. Set values at addresses to all three contexts.
                4. Base another context off of the contexts from #2.
                5. Set a value to an address in this context that has already
                   been set to in the non-base context.
                6. Squash the contexts producing a state hash and assert
                   that it equals a state hash obtained by manually updating
                   the merkle tree.
        """

        sh0 = self.first_state_hash
        # 1)
        squash = self.context_manager.get_squash_handler()
        ctx_1 = self.context_manager.create_context(
            state_hash=sh0,
            base_contexts=[],
            inputs=[self._create_address('llll')],
            outputs=[self._create_address('llll')]
        )
        self.context_manager.set(
            context_id=ctx_1,
            address_value_list=[{self._create_address('llll'): b'5'}]
        )

        sh1 = squash(
            state_root=sh0,
            context_ids=[ctx_1],
            persist=True,
            clean_up=True)

        # 2)
        context_1 = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[],
            inputs=[self._create_address('abcd')],
            outputs=[self._create_address('aaaa')]
        )
        context_2 = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[],
            inputs=[self._create_address('bacd')],
            outputs=[self._create_address('bbbb')]
        )
        context_3 = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[],
            inputs=[self._create_address('abcd')],
            outputs=[self._create_address('cccc'),
                     self._create_address('dddd')]
        )

        # 3)
        self.context_manager.set(
            context_id=context_1,
            address_value_list=[{self._create_address('aaaa'): b'1'}]
        )
        self.context_manager.set(
            context_id=context_2,
            address_value_list=[{self._create_address('bbbb'): b'2'}]
        )
        self.context_manager.set(
            context_id=context_3,
            address_value_list=[{self._create_address('cccc'): b'3'}]
        )

        # 4)
        context_n = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[context_1, context_2, context_3],
            inputs=[self._create_address('bbbb'), self._create_address('aaaa')],
            outputs=[self._create_address('cccc'), self._create_address('llll')]
        )

        # 5)
        self.context_manager.set(
            context_id=context_n,
            address_value_list=[{self._create_address('cccc'): b'4',
                                 self._create_address('llll'): b'8'}]
        )

        # 6)
        cm_state_root = squash(
            state_root=sh1,
            context_ids=[context_n],
            persist=False,
            clean_up=True)

        tree = MerkleDatabase(self.database_results)
        calc_state_root = tree.update({self._create_address('aaaa'): b'1',
                                       self._create_address('bbbb'): b'2',
                                       self._create_address('cccc'): b'4',
                                       self._create_address('llll'): b'8'})
        # FIX: assertEquals is a deprecated alias removed in Python 3.12;
        # use assertEqual.
        self.assertEqual(calc_state_root, cm_state_root)