예제 #1
0
파일: tests.py 프로젝트: sambacha/sprawl
    def test_state_store_get_and_set(self):
        """Verify that state deltas round-trip through a StateDeltaStore.

        Saves a list of StateChange values under a state root hash, reads
        them back, and asserts the retrieved changes equal the originals.
        """

        database = DictDatabase()

        delta_store = StateDeltaStore(database)

        changes = [StateChange(address='a100000' + str(i),
                               value=str(i).encode(),
                               type=StateChange.SET)
                   for i in range(10)]

        delta_store.save_state_deltas('my_state_root_hash', changes)

        stored_changes = delta_store.get_state_deltas('my_state_root_hash')
        # get_state_deltas returns a list-like repeated field; materialize
        # it as a plain list so it compares equal to `changes`.
        stored_changes = list(stored_changes)

        self.assertEqual(changes, stored_changes)
예제 #2
0
파일: tests.py 프로젝트: sambacha/sprawl
    def test_raise_key_error_on_missing_root_hash(self):
        """A lookup for an unknown state root hash must raise KeyError."""
        store = StateDeltaStore(DictDatabase())

        with self.assertRaises(KeyError):
            store.get_state_deltas('unknown_state_root_hash')
예제 #3
0
    def setUp(self):
        # Authoritative database backing the context manager under test.
        self.database_of_record = dict_database.DictDatabase()
        # The delta store gets its own, separate backing database.
        self.state_delta_store = StateDeltaStore(dict_database.DictDatabase())
        # Used for replicating state hashes through direct merkle tree
        # updates, for comparison against the context manager's results.
        self.database_results = dict_database.DictDatabase()

        self.context_manager = context_manager.ContextManager(
            self.database_of_record, self.state_delta_store)
        self.first_state_hash = self.context_manager.get_first_root()
예제 #4
0
    def test_add_subscriber(self):
        """Adding a subscriber that has no known blocks.

        This scenario is valid for subscribers who have never connected
        and need every delta since the genesis block. On registration the
        subscriber should receive exactly one event, carrying the genesis
        block's state changes filtered to its address prefixes.
        """
        mock_service = Mock()
        block_tree_manager = BlockTreeManager()
        delta_store = StateDeltaStore(DictDatabase())

        delta_processor = StateDeltaProcessor(
            service=mock_service,
            state_delta_store=delta_store,
            block_store=block_tree_manager.block_store)

        matching_change = StateChange(address='deadbeef0000000',
                                      value='my_genesis_value'.encode(),
                                      type=StateChange.SET)
        other_change = StateChange(address='a14ea01',
                                   value='some other state value'.encode(),
                                   type=StateChange.SET)
        delta_store.save_state_deltas(
            block_tree_manager.chain_head.state_root_hash,
            [matching_change, other_change])

        delta_processor.add_subscriber('test_conn_id', [], ['deadbeef'])

        self.assertEqual(['test_conn_id'], delta_processor.subscriber_ids)

        # Catch-up should send a single event from the chain head (the
        # chain head is the genesis block), containing only the change
        # matching the 'deadbeef' prefix.
        chain_head = block_tree_manager.chain_head
        expected_event = StateDeltaEvent(
            block_id=chain_head.identifier,
            block_num=chain_head.block_num,
            state_root_hash=chain_head.state_root_hash,
            previous_block_id=chain_head.previous_block_id,
            state_changes=[matching_change])
        mock_service.send.assert_called_with(
            validator_pb2.Message.STATE_DELTA_EVENT,
            expected_event.SerializeToString(),
            connection_id='test_conn_id')
예제 #5
0
    def test_publish_deltas_no_state_changes(self):
        """Publishing a block with no state deltas still emits an event.

        A block whose transactions changed no state must still trigger an
        event with the block change information and an empty
        state_changes list.
        """
        mock_service = Mock()
        block_tree_manager = BlockTreeManager()

        delta_processor = StateDeltaProcessor(
            service=mock_service,
            state_delta_store=StateDeltaStore(DictDatabase()),
            block_store=block_tree_manager.block_store)

        delta_processor.add_subscriber(
            'subscriber_conn_id',
            [block_tree_manager.chain_head.identifier],
            ['000000'])

        next_block = block_tree_manager.generate_block()
        delta_processor.publish_deltas(next_block)

        header = next_block.header
        expected_event = StateDeltaEvent(
            block_id=next_block.identifier,
            block_num=header.block_num,
            state_root_hash=header.state_root_hash,
            previous_block_id=header.previous_block_id,
            state_changes=[])
        mock_service.send.assert_called_with(
            validator_pb2.Message.STATE_DELTA_EVENT,
            expected_event.SerializeToString(),
            connection_id='subscriber_conn_id')
예제 #6
0
    def test_publish_deltas(self):
        """A prefix-filtered subscriber receives only matching changes."""
        mock_service = Mock()
        block_tree_manager = BlockTreeManager()
        delta_store = StateDeltaStore(DictDatabase())

        delta_processor = StateDeltaProcessor(
            service=mock_service,
            state_delta_store=delta_store,
            block_store=block_tree_manager.block_store)

        delta_processor.add_subscriber(
            'test_conn_id',
            [block_tree_manager.chain_head.identifier],
            ['deadbeef'])

        next_block = block_tree_manager.generate_block()

        matching_change = StateChange(address='deadbeef01',
                                      value='my_state_Value'.encode(),
                                      type=StateChange.SET)
        non_matching_change = StateChange(
            address='a14ea01',
            value='some other state value'.encode(),
            type=StateChange.SET)
        # State added during context squash for our block.
        delta_store.save_state_deltas(
            next_block.header.state_root_hash,
            [matching_change, non_matching_change])

        # Publish the deltas for that block to the subscribers.
        delta_processor.publish_deltas(next_block)

        # Only the 'deadbeef'-prefixed change should be delivered.
        expected_event = StateDeltaEvent(
            block_id=next_block.identifier,
            block_num=next_block.header.block_num,
            state_root_hash=next_block.header.state_root_hash,
            previous_block_id=next_block.header.previous_block_id,
            state_changes=[matching_change])
        mock_service.send.assert_called_with(
            validator_pb2.Message.STATE_DELTA_EVENT,
            expected_event.SerializeToString(),
            connection_id='test_conn_id')
예제 #7
0
    def setUp(self):
        # Primary database the context manager reads and writes.
        self.database_of_record = dict_database.DictDatabase()
        # Delta store is backed by its own database instance.
        self.state_delta_store = StateDeltaStore(dict_database.DictDatabase())
        # Used for replicating state hashes through direct merkle tree
        # updates in the tests.
        self.database_results = dict_database.DictDatabase()

        self.context_manager = context_manager.ContextManager(
            self.database_of_record, self.state_delta_store)
        self.first_state_hash = self.context_manager.get_first_root()
예제 #8
0
    def test_publish_deltas_subscriber_matches_no_addresses(self):
        """A subscriber whose filters match nothing still gets an event.

        When none of the stored deltas fall under the subscriber's
        address prefixes, the event is still sent, carrying the block
        change information and an empty state_changes list.
        """
        mock_service = Mock()
        block_tree_manager = BlockTreeManager()
        delta_store = StateDeltaStore(DictDatabase())

        delta_processor = StateDeltaProcessor(
            service=mock_service,
            state_delta_store=delta_store,
            block_store=block_tree_manager.block_store)

        delta_processor.add_subscriber(
            'settings_conn_id',
            [block_tree_manager.chain_head.identifier],
            ['000000'])

        next_block = block_tree_manager.generate_block()
        # State added during context squash for our block; neither
        # address falls under the '000000' prefix filter.
        delta_store.save_state_deltas(
            next_block.header.state_root_hash,
            [StateChange(address='deadbeef01',
                         value='my_state_Value'.encode(),
                         type=StateChange.SET),
             StateChange(address='a14ea01',
                         value='some other state value'.encode(),
                         type=StateChange.SET)])

        # Publish the deltas for that block to the subscribers.
        delta_processor.publish_deltas(next_block)

        # NOTE(review): unlike the sibling publish tests, the expected
        # event omits previous_block_id -- confirm whether that is
        # intentional.
        expected_event = StateDeltaEvent(
            block_id=next_block.identifier,
            block_num=next_block.header.block_num,
            state_root_hash=next_block.header.state_root_hash,
            state_changes=[])
        mock_service.send.assert_called_with(
            validator_pb2.Message.STATE_DELTA_EVENT,
            expected_event.SerializeToString(),
            connection_id='settings_conn_id')
예제 #9
0
    def test_get_events_ignore_bad_blocks(self):
        """GetStateDeltaEventsHandler skips block ids that do not exist.

        The response must contain events only for the requested blocks
        that are actually present in the block store.
        """
        block_tree_manager = BlockTreeManager()
        chain_head = block_tree_manager.chain_head

        delta_store = StateDeltaStore(DictDatabase())
        delta_store.save_state_deltas(
            chain_head.state_root_hash,
            [StateChange(address='deadbeef0000000',
                         value='my_genesis_value'.encode(),
                         type=StateChange.SET),
             StateChange(address='a14ea01',
                         value='some other state value'.encode(),
                         type=StateChange.SET)])

        handler = GetStateDeltaEventsHandler(block_tree_manager.block_store,
                                             delta_store)

        # Request one real block and one id that does not exist.
        request = GetStateDeltaEventsRequest(
            block_ids=[chain_head.identifier, 'somebadblockid'],
            address_prefixes=['deadbeef']).SerializeToString()

        response = handler.handle('test_conn_id', request)
        self.assertEqual(HandlerStatus.RETURN, response.status)
        self.assertEqual(GetStateDeltaEventsResponse.OK,
                         response.message_out.status)

        expected_events = [
            StateDeltaEvent(block_id=chain_head.identifier,
                            block_num=chain_head.block_num,
                            state_root_hash=chain_head.state_root_hash,
                            previous_block_id=chain_head.previous_block_id,
                            state_changes=[
                                StateChange(address='deadbeef0000000',
                                            value='my_genesis_value'.encode(),
                                            type=StateChange.SET)
                            ])
        ]
        self.assertEqual(expected_events,
                         list(response.message_out.events))
예제 #10
0
    def test_is_valid_subscription_no_known_blocks(self):
        """An empty list of known block ids is a valid subscription."""
        block_tree_manager = BlockTreeManager()

        delta_processor = StateDeltaProcessor(
            service=Mock(),
            state_delta_store=StateDeltaStore(DictDatabase()),
            block_store=block_tree_manager.block_store)

        self.assertTrue(delta_processor.is_valid_subscription([]))
예제 #11
0
    def test_is_valid_subscription_known_chain_head(self):
        """Subscribing with the chain head as the known block is valid."""
        block_tree_manager = BlockTreeManager()

        delta_processor = StateDeltaProcessor(
            service=Mock(),
            state_delta_store=StateDeltaStore(DictDatabase()),
            block_store=block_tree_manager.block_store)

        known_blocks = [block_tree_manager.chain_head.identifier]
        self.assertTrue(delta_processor.is_valid_subscription(known_blocks))
예제 #12
0
    def test_is_valid_subscription_known_old_chain(self):
        """A known block id from the middle of the chain is valid."""
        block_tree_manager = BlockTreeManager()

        chain = block_tree_manager.generate_chain(
            block_tree_manager.chain_head, 5)
        # Grab an id from the middle of the chain before committing it.
        known_id = chain[3].identifier
        block_tree_manager.block_store.update_chain(chain)

        delta_processor = StateDeltaProcessor(
            service=Mock(),
            state_delta_store=StateDeltaStore(DictDatabase()),
            block_store=block_tree_manager.block_store)

        self.assertTrue(delta_processor.is_valid_subscription([known_id]))
예제 #13
0
    def test_get_events_no_valid_block_ids(self):
        """An all-unknown block id list yields NO_VALID_BLOCKS_SPECIFIED.

        The GetStateDeltaEventsHandler must return a response with the
        NO_VALID_BLOCKS_SPECIFIED error when no requested block exists.
        """
        block_tree_manager = BlockTreeManager()

        handler = GetStateDeltaEventsHandler(
            block_tree_manager.block_store,
            StateDeltaStore(DictDatabase()))

        request = GetStateDeltaEventsRequest(
            block_ids=['somebadblockid'],
            address_prefixes=['deadbeef']).SerializeToString()

        response = handler.handle('test_conn_id', request)

        self.assertEqual(HandlerStatus.RETURN, response.status)
        self.assertEqual(
            GetStateDeltaEventsResponse.NO_VALID_BLOCKS_SPECIFIED,
            response.message_out.status)
예제 #14
0
class TestContextManager(unittest.TestCase):
    def setUp(self):
        # Database of record backing the context manager under test.
        self.database_of_record = dict_database.DictDatabase()
        # Delta store with its own backing database.
        self.state_delta_store = StateDeltaStore(dict_database.DictDatabase())
        # Used for replicating state hashes through direct merkle tree
        # updates in the comparison tests.
        self.database_results = dict_database.DictDatabase()

        self.context_manager = context_manager.ContextManager(
            self.database_of_record, self.state_delta_store)
        self.first_state_hash = self.context_manager.get_first_root()

    def tearDown(self):
        """Stop the context manager so each test releases its resources."""
        self.context_manager.stop()

    def _create_address(self, value=None):
        """
        Args:
            value: (str)

        Returns: (str) sha512 of value or random

        """
        if value is None:
            value = time.time().hex()
        return hashlib.sha512(value.encode()).hexdigest()[:70]

    def _setup_context(self):
        """Build a context layered on three base contexts with set state.

        Creates three independent contexts off the first state root, sets
        values for each context's output addresses, then creates a fourth
        context based on all three.

        Returns:
            the context id of the final, layered context.
        """
        # 1) Create transaction data
        first_transaction = {
            'inputs':
            [self._create_address(a) for a in ['aaaa', 'bbbb', 'cccc']],
            'outputs':
            [self._create_address(a) for a in ['llaa', 'aall', 'nnnn']]
        }
        second_transaction = {
            'inputs': [self._create_address(a) for a in ['aaaa', 'dddd']],
            'outputs': [
                self._create_address(a)
                for a in ['zzzz', 'yyyy', 'tttt', 'qqqq']
            ]
        }
        third_transaction = {
            'inputs':
            [self._create_address(a) for a in ['eeee', 'dddd', 'ffff']],
            'outputs': [
                self._create_address(a)
                for a in ['oooo', 'oozz', 'zzoo', 'ppoo', 'aeio']
            ]
        }
        # 2) Create contexts based on that data
        context_id_1 = self.context_manager.create_context(
            state_hash=self.first_state_hash,
            base_contexts=[],
            inputs=first_transaction['inputs'],
            outputs=first_transaction['outputs'])
        context_id_2 = self.context_manager.create_context(
            state_hash=self.first_state_hash,
            base_contexts=[],
            inputs=second_transaction['inputs'],
            outputs=second_transaction['outputs'])
        context_id_3 = self.context_manager.create_context(
            state_hash=self.first_state_hash,
            base_contexts=[],
            inputs=third_transaction['inputs'],
            outputs=third_transaction['outputs'])

        # 3) Set addresses with values
        self.context_manager.set(context_id_1, [{
            self._create_address(a): v
        } for a, v in [('llaa', b'1'), ('aall', b'2'), ('nnnn', b'3')]])
        self.context_manager.set(context_id_2, [{
            self._create_address(a): v
        } for a, v in [('zzzz', b'9'), ('yyyy', b'11'), ('tttt',
                                                         b'12'), ('qqqq',
                                                                  b'13')]])
        self.context_manager.set(context_id_3, [{
            self._create_address(a): v
        } for a, v in [('oooo', b'25'), ('oozz', b'26'), (
            'zzoo', b'27'), ('ppoo', b'28'), ('aeio', b'29')]])

        # 4) Create the final context layered on the three prior contexts
        context_id = self.context_manager.create_context(
            state_hash=self.first_state_hash,
            base_contexts=[context_id_1, context_id_2, context_id_3],
            inputs=[
                self._create_address(a)
                for a in ['llaa', 'yyyy', 'tttt', 'zzoo']
            ],
            outputs=[
                self._create_address(a)
                for a in ['llaa', 'yyyy', 'tttt', 'zzoo', 'aeio']
            ])
        return context_id

    def _create_txn_inputs_outputs(self, start=None):
        """Create unique addresses for a context's inputs/outputs/reads/writes.

        Eight disjoint batches of ten addresses each are generated; which
        batch an address belongs to encodes which of the four roles it
        plays (the name marks membership: i=input, o=output, r=read,
        w=write, _=absent).

        Venn Diagram of relationship of disjoint sets that make up the
        inputs, outputs, reads, and writes.

                    Inputs                      Outputs
            +----------+--------------------------+-----------+
            |          |                          |           |
            |  i___    |Reads       io__        Writes  _o__  |
            |          |                          |           |
            |    +-----------+-----------+---------------+    |
            |    |     |     |           |        |      |    |
            |    |i_r_ | ior_|  iorw     |  io_w  | _o_w |    |
            |    |     |     |           |        |      |    |
            |    +-----------+-----------+---------------+    |
            |          |                          |           |
            +----------+--------------------------+-----------+

        Args:
            start (int): An integer to start the sequence of integers
                being hashed to addresses.

        Returns (namedtuple): An object that holds inputs, outputs,
            reads, and writes.
        """
        if start is None:
            start = 0

        def address_batch(offset):
            # Ten unique addresses hashed from consecutive integers.
            return [self._create_address(str(i))
                    for i in range(start + offset, start + offset + 10)]

        iorw = address_batch(0)
        i_r_ = address_batch(10)
        ior_ = address_batch(20)
        io__ = address_batch(30)
        io_w = address_batch(40)
        _o_w = address_batch(50)
        _o__ = address_batch(60)
        i___ = address_batch(70)

        return TestAddresses(inputs=iorw + ior_ + io__ + io_w + i___,
                             outputs=ior_ + io__ + io_w + _o__ + _o_w,
                             reads=i_r_ + ior_,
                             writes=io_w + _o_w)

    def test_address_enforcement(self):
        """Gets and sets with malformed addresses must be rejected.

        Uses addresses that fall under a declared namespace but end in a
        non-hex character, and asserts that both get and set raise an
        AuthorizationException for them.
        """
        invalid_address1 = 'a' * 69 + 'n'
        invalid_address2 = 'b' * 69 + 'y'

        context_id1 = self.context_manager.create_context(
            state_hash=self.context_manager.get_first_root(),
            base_contexts=[],
            inputs=['aaaaaaaa', 'bbbbbbbb'],
            outputs=['aaaaaaaa', 'bbbbbbbb'])

        with self.assertRaises(context_manager.AuthorizationException):
            self.context_manager.get(
                context_id=context_id1,
                address_list=[invalid_address1, invalid_address2])

        with self.assertRaises(context_manager.AuthorizationException):
            self.context_manager.set(
                context_id=context_id1,
                address_value_list=[{invalid_address1: b'1'},
                                    {invalid_address2: b'2'}])

    def test_get_set_wrong_namespace(self):
        """Gets and sets outside the declared namespaces are rejected.

        Notes:
            1. Sets on a context with addresses that aren't under an
               output namespace raise an AuthorizationException.
            2. Gets on a context with addresses that aren't under an
               input namespace raise an AuthorizationException.
        """
        wrong_namespace1 = self._create_address('a')[-10:]
        wrong_namespace2 = '00000000'

        ctx_1 = self.context_manager.create_context(
            state_hash=self.context_manager.get_first_root(),
            base_contexts=[],
            inputs=[wrong_namespace1, wrong_namespace2],
            outputs=[wrong_namespace1, wrong_namespace2])

        # 1: sets on addresses outside the declared output namespaces
        for address, value in ((self._create_address('a'), b'1'),
                               (self._create_address('c'), b'5')):
            with self.assertRaises(context_manager.AuthorizationException):
                self.context_manager.set(
                    context_id=ctx_1,
                    address_value_list=[{address: value}])

        # 2: gets on addresses outside the declared input namespaces
        for address in (self._create_address('a'),
                        self._create_address('c')):
            with self.assertRaises(context_manager.AuthorizationException):
                self.context_manager.get(context_id=ctx_1,
                                         address_list=[address])

    def test_exception_on_invalid_input(self):
        """Invalid input namespaces raise CreateContextException.

        Covers an invalid character, a 71-character (too long) namespace,
        and a namespace with several invalid characters.
        """
        invalid_input_output1 = '0db7e8zc'  # invalid character
        invalid_input_output2 = '7ef84ed' * 10 + '5'  # too long, 71 chars
        invalid_input_output3 = 'yy76ftoph7465873ddde389f'  # invalid chars

        valid_input_output1 = 'd8f533bbb74443222daad4'
        valid_input_output2 = '77465847465784757848ddddddf'

        state_hash = self.context_manager.get_first_root()

        cases = [
            # 1: invalid character in an input namespace
            ([invalid_input_output1, valid_input_output1],
             [valid_input_output2]),
            # 2: 71-character input namespace
            ([valid_input_output1, invalid_input_output2],
             [valid_input_output2]),
            # 3: several invalid characters in an input namespace
            ([invalid_input_output3, valid_input_output2],
             [valid_input_output2, valid_input_output1]),
        ]
        for inputs, outputs in cases:
            with self.assertRaises(context_manager.CreateContextException):
                self.context_manager.create_context(state_hash=state_hash,
                                                    base_contexts=[],
                                                    inputs=inputs,
                                                    outputs=outputs)

    def test_exception_on_invalid_output(self):
        """Invalid output namespaces raise CreateContextException.

        Covers an odd number of characters, a 71-character (too long)
        namespace, and a namespace with several invalid characters.
        """
        invalid_input_output1 = '0db7e87'  # Odd number of characters
        invalid_input_output2 = '7ef84ed' * 10 + '5'  # too long, 71 chars
        invalid_input_output3 = 'yy76ftoph7465873ddde389f'  # invalid chars

        valid_input_output1 = 'd8f533bbb74443222daad4'
        valid_input_output2 = '77465847465784757848ddddddff'

        state_hash = self.context_manager.get_first_root()

        cases = [
            # 1: odd number of characters in an output namespace
            ([valid_input_output2, valid_input_output1],
             [invalid_input_output1]),
            # 2: 71-character output namespace
            ([valid_input_output1, valid_input_output2],
             [invalid_input_output2]),
            # 3: several invalid characters in an output namespace
            ([valid_input_output1, valid_input_output2],
             [valid_input_output2, invalid_input_output3]),
        ]
        for inputs, outputs in cases:
            with self.assertRaises(context_manager.CreateContextException):
                self.context_manager.create_context(state_hash=state_hash,
                                                    base_contexts=[],
                                                    inputs=inputs,
                                                    outputs=outputs)

    def test_namespace_gets(self):
        """Tests that gets for an address under a namespace will return the
        correct value.

        Notes:
            1) Create ctx_1 and set 'b' to b'8'.
            2) squash the previous context creating state_hash_1.
            3) Create 2 contexts off of this state hash and assert
               that gets on these contexts retrieve the correct
               value for an address that is not fully specified in the inputs.
            4) Set values to addresses in these contexts.
            5) Create 1 context off of these prior 2 contexts and assert that
               gets from this context retrieve the correct values for
               addresses that are not fully specified in the inputs. 2 of the
               values are found in the chain of contexts, and 1 is not found
               and so must be retrieved from the merkle tree.
        """

        # 1
        ctx_1 = self.context_manager.create_context(
            state_hash=self.context_manager.get_first_root(),
            base_contexts=[],
            inputs=[self._create_address('a')],
            outputs=[self._create_address('b')])

        self.context_manager.set(context_id=ctx_1,
                                 address_value_list=[{
                                     self._create_address('b'):
                                     b'8'
                                 }])

        # 2
        squash = self.context_manager.get_squash_handler()
        state_hash_1 = squash(state_root=self.context_manager.get_first_root(),
                              context_ids=[ctx_1],
                              persist=True,
                              clean_up=True)

        # 3
        # Note: assertEquals is a deprecated alias (removed in Python
        # 3.12); assertEqual is used throughout.
        ctx_1a = self.context_manager.create_context(
            state_hash=state_hash_1,
            base_contexts=[],
            inputs=[self._create_address('a')[:10]],
            outputs=[self._create_address('c')])
        self.assertEqual(
            self.context_manager.get(context_id=ctx_1a,
                                     address_list=[self._create_address('a')]),
            [(self._create_address('a'), None)])

        ctx_1b = self.context_manager.create_context(
            state_hash=state_hash_1,
            base_contexts=[],
            inputs=[self._create_address('b')[:6]],
            outputs=[self._create_address('z')])
        self.assertEqual(
            self.context_manager.get(context_id=ctx_1b,
                                     address_list=[self._create_address('b')]),
            [(self._create_address('b'), b'8')])

        # 4
        self.context_manager.set(context_id=ctx_1b,
                                 address_value_list=[{
                                     self._create_address('z'):
                                     b'2'
                                 }])

        self.context_manager.set(context_id=ctx_1a,
                                 address_value_list=[{
                                     self._create_address('c'):
                                     b'1'
                                 }])

        # 5
        ctx_2 = self.context_manager.create_context(
            state_hash=state_hash_1,
            base_contexts=[ctx_1a, ctx_1b],
            inputs=[
                self._create_address('z')[:10],
                self._create_address('c')[:10],
                self._create_address('b')[:10]
            ],
            outputs=[self._create_address('w')])

        self.assertEqual(
            self.context_manager.get(context_id=ctx_2,
                                     address_list=[
                                         self._create_address('z'),
                                         self._create_address('c'),
                                         self._create_address('b')
                                     ]), [(self._create_address('z'), b'2'),
                                          (self._create_address('c'), b'1'),
                                          (self._create_address('b'), b'8')])

    def test_create_context_with_prior_state(self):
        """Tests context creation with prior state from base contexts.

        Notes:
            Set up the context:
                Create 3 prior contexts each with 3-5 addresses to set to.
                Make set calls to those addresses.
                Create 1 new context based on those three prior contexts.
                this test method:
            Test:
                Make a get call on addresses that are from prior state,
                making assertions about the correct values.
        """
        context_id = self._setup_context()

        # Name/value pairs the prior contexts are known to have written.
        expected_pairs = [('llaa', b'1'), ('yyyy', b'11'),
                          ('tttt', b'12'), ('zzoo', b'27')]

        queried = self.context_manager.get(
            context_id,
            [self._create_address(name) for name, _ in expected_pairs])
        self.assertEqual(
            queried,
            [(self._create_address(name), value)
             for name, value in expected_pairs])

    def test_squash(self):
        """Tests that squashing a context based on state from other
        contexts will result in the same merkle hash as updating the
        merkle tree with the same data.

        Notes:
            Set up the context

            Test:
                1) Make set calls on several of the addresses.
                2) Squash the context to get a new state hash.
                3) Apply all of the aggregate sets from all
                of the contexts, to another database with a merkle tree.
                4) Assert that the state hashes are the same.
                5) Assert that the state deltas have been stored
        """
        # 1)
        context_id = self._setup_context()
        self.context_manager.set(context_id, [{
            self._create_address(a): v
        } for a, v in [('yyyy', b'2'), ('tttt', b'4')]])

        # 2)
        squash = self.context_manager.get_squash_handler()
        resulting_state_hash = squash(self.first_state_hash, [context_id],
                                      persist=True,
                                      clean_up=True)

        # 3) Mirror the aggregate writes (prior-context sets plus the two
        # overwrites above) directly into a fresh merkle tree.
        final_state_to_update = {
            self._create_address(a): v
            for a, v in [('llaa', b'1'), ('aall', b'2'), (
                'nnnn', b'3'), ('zzzz', b'9'), ('yyyy', b'2'), (
                    'tttt',
                    b'4'), ('qqqq', b'13'), ('oooo', b'25'), (
                        'oozz', b'26'), ('zzoo',
                                         b'27'), ('ppoo',
                                                  b'28'), ('aeio', b'29')]
        }

        test_merkle_tree = MerkleDatabase(self.database_results)
        test_resulting_state_hash = test_merkle_tree.update(
            final_state_to_update, virtual=False)
        # 4)
        self.assertEqual(resulting_state_hash, test_resulting_state_hash)
        state_changes = self.state_delta_store.get_state_deltas(
            resulting_state_hash)

        # 5) assertIn reports the missing element on failure, unlike
        # assertTrue(x in y) which only reports "False is not true".
        for addr, value in final_state_to_update.items():
            expected_state_change = StateChange(address=addr,
                                                value=value,
                                                type=StateChange.SET)
            self.assertIn(expected_state_change, state_changes)

    def test_squash_no_updates(self):
        """Tests that squashing a context that has no state updates will return
           the starting state root hash.

        Notes:
            Set up the context

            Test:
                1) Squash the context.
                2) Assert that the state hash is the same as the starting
                hash.
                3) Assert that the state deltas have not been overwritten
        """
        self.state_delta_store.save_state_deltas(
            self.first_state_hash,
            [StateChange(address='aaa', value=b'xyz', type=StateChange.SET)])

        context_id = self.context_manager.create_context(
            state_hash=self.first_state_hash,
            base_contexts=[],
            inputs=[],
            outputs=[])
        # 1)
        squash = self.context_manager.get_squash_handler()
        resulting_state_hash = squash(self.first_state_hash, [context_id],
                                      persist=True,
                                      clean_up=True)
        # 2) assertEqual: assertEquals is a deprecated alias.
        self.assertIsNotNone(resulting_state_hash)
        self.assertEqual(resulting_state_hash, self.first_state_hash)

        # 3) list() materializes the repeated field so it compares
        # against a plain list.
        changes = self.state_delta_store.get_state_deltas(resulting_state_hash)

        self.assertEqual(
            [StateChange(address='aaa', value=b'xyz', type=StateChange.SET)],
            list(changes))

    def test_reads_from_context_w_several_writes(self):
        """Tests that those context values that have been written to the
        Merkle tree, or that have been set to a base_context, will have the
        correct value at the address for a given context.

                                               ->context_id_a1
                                               |              |
                                               |              |
                                               |              |
        sh0-->context_id1-->sh1-->context_a-----              -->context_id_b
                                               |              |
                                               |              |
                                               |              |
                                               |              |
                                               -->context_id_a2

        Notes:

            Test:
                1. From a Merkle Tree with only the root node in it, create a
                   context and set several values, and then squash that context
                   upon the first state hash.
                2. Create a context with no base context, based on
                   the merkle root computed from the first squash.
                   Assert that gets from this context will provide
                   values that were set in the first context.
                3. Write to all of the available outputs
                4. Create a new context based on context_a, from #2,
                5. Assert that gets from this context equal the values set
                   to Context A.
                6. Create a new context based on context_a and set values to
                   this context.
                7. Create a new context based on the 2 contexts made in 4 and 6
                8. From this context assert that gets equal the correct values
                   set in the prior contexts.
        """

        squash = self.context_manager.get_squash_handler()
        test_addresses = self._create_txn_inputs_outputs()
        # 1)
        context_id1 = self.context_manager.create_context(
            state_hash=self.first_state_hash,
            inputs=test_addresses.inputs,
            outputs=test_addresses.outputs,
            base_contexts=[])

        values1 = [bytes(i) for i in range(len(test_addresses.writes))]
        self.context_manager.set(context_id1, [{
            a: v
        } for a, v in zip(test_addresses.writes, values1)])
        sh1 = squash(state_root=self.first_state_hash,
                     context_ids=[context_id1],
                     persist=True,
                     clean_up=True)
        # 2) assertEqual throughout: assertEquals is a deprecated alias.
        context_a = self.context_manager.create_context(
            state_hash=sh1,
            inputs=test_addresses.writes,  # read from every address written to
            outputs=test_addresses.outputs,
            base_contexts=[])

        address_values = self.context_manager.get(context_a,
                                                  list(test_addresses.writes))
        self.assertEqual(address_values,
                         [(a, v)
                          for a, v in zip(test_addresses.writes, values1)])

        # 3)
        values2 = [bytes(v.encode()) for v in test_addresses.outputs]
        self.context_manager.set(
            context_id=context_a,
            address_value_list=[{
                a: v
            } for a, v in zip(test_addresses.outputs, values2)])

        # 4)
        context_id_a1 = self.context_manager.create_context(
            state_hash=sh1,
            inputs=test_addresses.outputs,
            outputs=test_addresses.outputs,
            base_contexts=[context_a])
        # 5)
        c_ida1_address_values = self.context_manager.get(
            context_id=context_id_a1,
            address_list=list(test_addresses.outputs))
        self.assertEqual(c_ida1_address_values,
                         [(a, v)
                          for a, v in zip(test_addresses.outputs, values2)])

        # 6)
        test_addresses2 = self._create_txn_inputs_outputs(80)
        context_id_a2 = self.context_manager.create_context(
            state_hash=sh1,
            inputs=test_addresses2.inputs,
            outputs=test_addresses2.outputs,
            base_contexts=[context_a])
        values3 = [bytes(v.encode()) for v in test_addresses2.writes]
        self.context_manager.set(
            context_id=context_id_a2,
            address_value_list=[{
                a: v
            } for a, v in zip(test_addresses2.writes, values3)],
        )
        # 7)
        context_id_b = self.context_manager.create_context(
            state_hash=sh1,
            inputs=test_addresses2.writes + test_addresses.outputs,
            outputs=[],
            base_contexts=[context_id_a1, context_id_a2])

        # 8)
        self.assertEqual(
            self.context_manager.get(
                context_id_b,
                list(test_addresses2.writes + test_addresses.outputs)),
            [(a, v)
             for a, v in zip(test_addresses2.writes +
                             test_addresses.outputs, values3 + values2)])

    def test_state_root_after_parallel_ctx(self):
        """Tests that the correct state root is calculated after basing one
        context off of multiple contexts.

                              i=abcd
                              o=aaaa
                           +>context_1+
                           |  aaaa=1  |
                           |          |
               i=llll      |   i=bacd |      i=bbbb,aaaa
               o=llll      |   o=bbbb |      o=cccc,llll
        sh0--->ctx_0-->sh1>|-->context_2-+---->context_n---->sh2
               llll=5      |   bbbb=2 |      cccc=4
                           |          |      llll=8
                           |   i=abcd |
                           |   o=cccc |
                           +>context_3+
                               cccc=3

        Notes:
            Test:
                1. Create a context, set a value in it and squash it into a new
                   state hash.
                2. Create 3 contexts based off of the state root from #1.
                3. Set values at addresses to all three contexts.
                4. Base another context off of the contexts from #2.
                5. Set a value to an address in this context that has already
                   been set to in the non-base context.
                6. Squash the contexts producing a state hash and assert
                   that it equals a state hash obtained by manually updating
                   the merkle tree.
        """

        sh0 = self.first_state_hash
        # 1)
        squash = self.context_manager.get_squash_handler()
        ctx_1 = self.context_manager.create_context(
            state_hash=sh0,
            base_contexts=[],
            inputs=[self._create_address('llll')],
            outputs=[self._create_address('llll')])
        self.context_manager.set(context_id=ctx_1,
                                 address_value_list=[{
                                     self._create_address('llll'):
                                     b'5'
                                 }])

        sh1 = squash(state_root=sh0,
                     context_ids=[ctx_1],
                     persist=True,
                     clean_up=True)

        # 2)
        context_1 = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[],
            inputs=[self._create_address('abcd')],
            outputs=[self._create_address('aaaa')])
        context_2 = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[],
            inputs=[self._create_address('bacd')],
            outputs=[self._create_address('bbbb')])
        context_3 = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[],
            inputs=[self._create_address('abcd')],
            outputs=[
                self._create_address('cccc'),
                self._create_address('dddd')
            ])

        # 3)
        self.context_manager.set(context_id=context_1,
                                 address_value_list=[{
                                     self._create_address('aaaa'):
                                     b'1'
                                 }])
        self.context_manager.set(context_id=context_2,
                                 address_value_list=[{
                                     self._create_address('bbbb'):
                                     b'2'
                                 }])
        self.context_manager.set(context_id=context_3,
                                 address_value_list=[{
                                     self._create_address('cccc'):
                                     b'3'
                                 }])

        # 4)
        context_n = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[context_1, context_2, context_3],
            inputs=[
                self._create_address('bbbb'),
                self._create_address('aaaa')
            ],
            outputs=[
                self._create_address('cccc'),
                self._create_address('llll')
            ])

        # 5)
        self.context_manager.set(context_id=context_n,
                                 address_value_list=[{
                                     self._create_address('cccc'):
                                     b'4',
                                     self._create_address('llll'):
                                     b'8'
                                 }])

        # 6)
        cm_state_root = squash(state_root=sh1,
                               context_ids=[context_n],
                               persist=False,
                               clean_up=True)

        tree = MerkleDatabase(self.database_results)
        calc_state_root = tree.update({
            self._create_address('aaaa'): b'1',
            self._create_address('bbbb'): b'2',
            self._create_address('cccc'): b'4',
            self._create_address('llll'): b'8'
        })
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(calc_state_root, cm_state_root)

    def test_complex_basecontext_squash(self):
        """Tests complex context basing and squashing.
                                            i=qq,dd dd=0
                                            o=dd,pp pp=1
                                i=cc,aa  +->context_3_2a_1+|
                                o=dd,ll  |                 |
               i=aa,ab      +->context_2a|  i=aa    aa=0   |
               o=cc,ab      |   dd=10    |  o=aa,ll ll=1   |
        sh0->context_1-->sh1|   ll=11    +->context_3_2a_2+|->sh1
               cc=0         |   i=cc,aa  +->context_3_2b_1+|
               ab=1         |   o=nn,mm  |  i=nn,ba mm=0   |
                            +->context_2b|  o=mm,ba ba=1   |
                                nn=0     |                 |
                                mm=1     +->context_3_2b_2+|
                                            i=nn,oo ab=0
                                            o=ab,oo oo=1

        Notes:
            Test:
                1. Create a context off of the first state hash, set
                   addresses in it, and squash that context, getting a new
                   merkle root.
                2. Create 2 contexts with the context in # 1 as the base, and
                   for each of these contexts set addresses to values where the
                   outputs for each are disjoint.
                3. For each of these 2 contexts create 2 more contexts each
                   having one of the contexts in # 2 as the base context, and
                   set addresses to values.
                4. Squash the 4 contexts from #3 and assert the state hash
                   is equal to a manually computed state hash.
        """

        squash = self.context_manager.get_squash_handler()
        # 1)
        inputs_1 = [self._create_address('aa'), self._create_address('ab')]
        outputs_1 = [self._create_address('cc'), self._create_address('ab')]
        context_1 = self.context_manager.create_context(
            state_hash=self.first_state_hash,
            base_contexts=[],
            inputs=inputs_1,
            outputs=outputs_1)
        self.context_manager.set(
            context_id=context_1,
            address_value_list=[{
                a: v
            } for a, v in zip(outputs_1,
                              [bytes(i) for i in range(len(outputs_1))])])

        sh1 = squash(state_root=self.first_state_hash,
                     context_ids=[context_1],
                     persist=True,
                     clean_up=True)

        # 2)
        inputs_2a = [self._create_address('cc'), self._create_address('aa')]
        outputs_2a = [self._create_address('dd'), self._create_address('ll')]
        context_2a = self.context_manager.create_context(
            state_hash=self.first_state_hash,
            base_contexts=[],
            inputs=inputs_2a,
            outputs=outputs_2a)

        inputs_2b = [self._create_address('cc'), self._create_address('aa')]
        outputs_2b = [self._create_address('nn'), self._create_address('mm')]
        context_2b = self.context_manager.create_context(state_hash=sh1,
                                                         base_contexts=[],
                                                         inputs=inputs_2b,
                                                         outputs=outputs_2b)

        self.context_manager.set(
            context_id=context_2a,
            address_value_list=[{
                a: bytes(v)
            } for a, v in zip(outputs_2a, range(10, 10 + len(outputs_2a)))])
        self.context_manager.set(
            context_id=context_2b,
            address_value_list=[{
                a: bytes(v)
            } for a, v in zip(outputs_2b, range(len(outputs_2b)))])

        # 3)
        inputs_3_2a_1 = [
            self._create_address('qq'),
            self._create_address('dd')
        ]
        outputs_3_2a_1 = [
            self._create_address('dd'),
            self._create_address('pp')
        ]
        context_3_2a_1 = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[context_2a],
            inputs=inputs_3_2a_1,
            outputs=outputs_3_2a_1)
        inputs_3_2a_2 = [self._create_address('aa')]
        outputs_3_2a_2 = [
            self._create_address('aa'),
            self._create_address('ll')
        ]
        context_3_2a_2 = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[context_2a],
            inputs=inputs_3_2a_2,
            outputs=outputs_3_2a_2)

        inputs_3_2b_1 = [
            self._create_address('nn'),
            self._create_address('ab')
        ]
        outputs_3_2b_1 = [
            self._create_address('mm'),
            self._create_address('ba')
        ]
        context_3_2b_1 = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[context_2b],
            inputs=inputs_3_2b_1,
            outputs=outputs_3_2b_1)

        inputs_3_2b_2 = [
            self._create_address('nn'),
            self._create_address('oo')
        ]
        outputs_3_2b_2 = [
            self._create_address('ab'),
            self._create_address('oo')
        ]
        context_3_2b_2 = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[context_2b],
            inputs=inputs_3_2b_2,
            outputs=outputs_3_2b_2)

        self.context_manager.set(
            context_id=context_3_2a_1,
            address_value_list=[{
                a: bytes(v)
            } for a, v in zip(outputs_3_2a_1, range(len(outputs_3_2a_1)))])
        self.context_manager.set(
            context_id=context_3_2a_2,
            address_value_list=[{
                a: bytes(v)
            } for a, v in zip(outputs_3_2a_2, range(len(outputs_3_2a_2)))])
        self.context_manager.set(
            context_id=context_3_2b_1,
            address_value_list=[{
                a: bytes(v)
            } for a, v in zip(outputs_3_2b_1, range(len(outputs_3_2b_1)))])
        self.context_manager.set(
            context_id=context_3_2b_2,
            address_value_list=[{
                a: bytes(v)
            } for a, v in zip(outputs_3_2b_2, range(len(outputs_3_2b_2)))])

        # 4)
        sh2 = squash(state_root=sh1,
                     context_ids=[
                         context_3_2a_1, context_3_2a_2, context_3_2b_1,
                         context_3_2b_2
                     ],
                     persist=False,
                     clean_up=True)

        tree = MerkleDatabase(self.database_results)
        state_hash_from_1 = tree.update(set_items={
            a: v
            for a, v in zip(outputs_1,
                            [bytes(i) for i in range(len(outputs_1))])
        },
                                        virtual=False)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(
            state_hash_from_1, sh1,
            "The manually calculated state hash from the first "
            "context and the one calculated by squashing that "
            "state hash should be the same")
        tree.set_merkle_root(state_hash_from_1)
        test_sh2 = tree.update(
            set_items={
                self._create_address('aa'): bytes(0),
                self._create_address('ab'): bytes(0),
                self._create_address('ba'): bytes(1),
                self._create_address('dd'): bytes(0),
                self._create_address('ll'): bytes(1),
                self._create_address('mm'): bytes(0),
                self._create_address('oo'): bytes(1),
                self._create_address('pp'): bytes(1),
                self._create_address('nn'): bytes(0),
                self._create_address('cc'): bytes(0)
            })

        self.assertEqual(
            sh2, test_sh2, "Manually calculated and context "
            "manager calculated merkle hashes "
            "are the same")

    @unittest.skip("Necessary to catch scheduler bugs--Depth-first search")
    def test_check_for_bad_combination(self):
        """Tests that the context manager will raise
        an exception if asked to combine contexts, either via base contexts
        in create_context or via squash that shouldn't be
        combined because they share addresses that can't be determined by the
        scheduler to not have been parallel. This is a check on scheduler bugs.

        Examples where the context manager should raise an exception on
        duplicate addresses:
        1. Success
              i=a
              o=b
           +>ctx_1+
        dup|->b=3 |
           |      |
        sh0|      ----->state hash or context
           |  i=q |
           |  o=b |
           +>ctx_2+
        dup-->b=2
        2.
                      i=b
                      o=d
                   +>ctx_1a_1+
                   |  d=4    |
             i=a   |         |
             o=b   |         |
           +>ctx_1a|         |
           | b=2   |  i=b    |
           |       |  o=c    |
        sh0|       +>ctx_1a_2|
           |     dup-> c=7   |------>state hash or context
           |  i=a  +>ctx_1b_1|
           |  o=c  |         |
           +>ctx_1b|         |
        dup-->c=5  |  i=t    |
                   |  o=p    |
                   +>ctx_1b_2+
                      p=8

        3.
                      i=b
                      o=d
                   +>ctx_1a_1+
                   |  d=4    |   i=d,c
             i=a   |         |   o=n
             o=b   |         <>ctx_3a+
           +>ctx_1a|         |   n=5 |
           | b=2   |  i=b    |       |
           |       |  o=c    |       |
        sh0|       +>ctx_1a_2+       <----->state hash or context
           |   dup--> c=7            |
           |  i=a  +>ctx_1b_1+       |
           |  o=c  |         |  i=c  |
           +>ctx_1b|         |  o=q  |
              c=5  |  i=c    <>ctx_3b+
                   |  o=c    |  q=5
                   +>ctx_1b_2+
               dup--> c=1

        """

        # 1.
        squash = self.context_manager.get_squash_handler()
        sh0 = self.first_state_hash
        inputs_1 = [self._create_address('a')]
        outputs_1 = [self._create_address('b')]
        ctx_1 = self.context_manager.create_context(state_hash=sh0,
                                                    base_contexts=[],
                                                    inputs=inputs_1,
                                                    outputs=outputs_1)
        self.context_manager.set(context_id=ctx_1,
                                 address_value_list=[{
                                     self._create_address('b'):
                                     b'3'
                                 }])

        inputs_2 = [self._create_address('q')]
        outputs_2 = [self._create_address('b')]
        ctx_2 = self.context_manager.create_context(state_hash=sh0,
                                                    base_contexts=[],
                                                    inputs=inputs_2,
                                                    outputs=outputs_2)
        self.context_manager.set(context_id=ctx_2,
                                 address_value_list=[{
                                     self._create_address('b'):
                                     b'2'
                                 }])

        # Squashing two contexts that wrote to the same address must fail;
        # assertRaises replaces the manual try/fail/except-pass pattern,
        # which also left an unused `sh1` local behind.
        with self.assertRaises(Exception):
            squash(state_root=sh0,
                   context_ids=[ctx_1, ctx_2],
                   persist=True,
                   clean_up=True)
# Example #15 (page-scrape artifact: "예제 #15 / 0")
class TestContextManager(unittest.TestCase):

    def setUp(self):
        """Build a fresh ContextManager backed by in-memory databases
        before every test.
        """
        # Separate database used to compute reference state hashes via
        # direct merkle tree updates, for comparison against the
        # ContextManager's results.
        self.database_results = dict_database.DictDatabase()

        self.database_of_record = dict_database.DictDatabase()
        self.state_delta_store = StateDeltaStore(dict_database.DictDatabase())
        self.context_manager = context_manager.ContextManager(
            self.database_of_record, self.state_delta_store)
        self.first_state_hash = self.context_manager.get_first_root()

    def tearDown(self):
        """Stop the context manager after each test."""
        self.context_manager.stop()

    def _create_address(self, value=None):
        """
        Args:
            value: (str)

        Returns: (str) sha512 of value or random

        """
        if value is None:
            value = time.time().hex()
        return hashlib.sha512(value.encode()).hexdigest()[:70]

    def _setup_context(self):
        """Create three populated contexts and return a fourth context
        that is based on all three.

        Returns:
            str: the context id of the combined context.
        """
        # 1) Transaction data: (input names, output names) per transaction.
        txn_specs = [
            (['aaaa', 'bbbb', 'cccc'],
             ['llaa', 'aall', 'nnnn']),
            (['aaaa', 'dddd'],
             ['zzzz', 'yyyy', 'tttt', 'qqqq']),
            (['eeee', 'dddd', 'ffff'],
             ['oooo', 'oozz', 'zzoo', 'ppoo', 'aeio']),
        ]

        # 2) One context per transaction, all rooted at the first state hash.
        context_ids = [
            self.context_manager.create_context(
                state_hash=self.first_state_hash,
                base_contexts=[],
                inputs=[self._create_address(name) for name in in_names],
                outputs=[self._create_address(name) for name in out_names])
            for in_names, out_names in txn_specs]

        # 3) Populate each context's output addresses.
        write_sets = [
            [('llaa', b'1'), ('aall', b'2'), ('nnnn', b'3')],
            [('zzzz', b'9'), ('yyyy', b'11'), ('tttt', b'12'),
             ('qqqq', b'13')],
            [('oooo', b'25'), ('oozz', b'26'), ('zzoo', b'27'),
             ('ppoo', b'28'), ('aeio', b'29')],
        ]
        for ctx_id, pairs in zip(context_ids, write_sets):
            self.context_manager.set(
                ctx_id,
                [{self._create_address(name): value}
                 for name, value in pairs])

        # 4) A context based on all three populated contexts.
        return self.context_manager.create_context(
            state_hash=self.first_state_hash,
            base_contexts=context_ids,
            inputs=[self._create_address(name)
                    for name in ['llaa', 'yyyy', 'tttt', 'zzoo']],
            outputs=[self._create_address(name)
                     for name in ['llaa', 'yyyy', 'tttt', 'zzoo', 'aeio']])

    def _create_txn_inputs_outputs(self, start=None):
        """Create unique addresses partitioned into eight disjoint groups,
        named by which of inputs (i), outputs (o), reads (r), and writes
        (w) they belong to (an underscore marks absence; e.g. ``i_r_`` is
        input-and-read only).

        Knowing which disjoint group an address belongs to can localize a
        test failure in the context manager.

        Args:
            start (int): starting integer of the sequence that is hashed
                into addresses; defaults to 0.

        Returns:
            TestAddresses: the inputs, outputs, reads, and writes.
        """
        base = 0 if start is None else start

        def group(offset):
            # Ten consecutive hashed-integer addresses.
            return [self._create_address(str(i))
                    for i in range(base + offset, base + offset + 10)]

        iorw = group(0)
        i_r_ = group(10)
        ior_ = group(20)
        io__ = group(30)
        io_w = group(40)
        _o_w = group(50)
        _o__ = group(60)
        i___ = group(70)

        # NOTE(review): despite the group names, ``iorw`` is not included
        # in reads/writes below — kept as-is to preserve behavior.
        return TestAddresses(
            inputs=iorw + ior_ + io__ + io_w + i___,
            outputs=ior_ + io__ + io_w + _o__ + _o_w,
            reads=i_r_ + ior_,
            writes=io_w + _o_w
        )

    def test_address_enforcement(self):
        """Tests that the ContextManager enforces address characteristics.

        Notes:
            1. Call get and set with addresses that are under an
               authorized namespace but are themselves invalid, and
               assert both raise an AuthorizationException.
        """

        # 1) Addresses ending in a non-hex character.
        bad_address_1 = 'a' * 69 + 'n'
        bad_address_2 = 'b' * 69 + 'y'

        ctx = self.context_manager.create_context(
            state_hash=self.context_manager.get_first_root(),
            base_contexts=[],
            inputs=['aaaaaaaa', 'bbbbbbbb'],
            outputs=['aaaaaaaa', 'bbbbbbbb'])

        # Reads of invalid addresses are rejected.
        with self.assertRaises(context_manager.AuthorizationException):
            self.context_manager.get(
                context_id=ctx,
                address_list=[bad_address_1, bad_address_2])

        # Writes to invalid addresses are rejected.
        with self.assertRaises(context_manager.AuthorizationException):
            self.context_manager.set(
                context_id=ctx,
                address_value_list=[{bad_address_1: b'1'},
                                    {bad_address_2: b'2'}])

    def test_get_set_wrong_namespace(self):
        """Tests that gets and sets outside the declared namespaces raise
        an AuthorizationException.

        Notes:
            1. Sets on addresses not under an output namespace raise
               AuthorizationException.
            2. Gets on addresses not under an input namespace raise
               AuthorizationException.
        """

        outside_ns_1 = self._create_address('a')[-10:]
        outside_ns_2 = '00000000'

        ctx = self.context_manager.create_context(
            state_hash=self.context_manager.get_first_root(),
            base_contexts=[],
            inputs=[outside_ns_1, outside_ns_2],
            outputs=[outside_ns_1, outside_ns_2])

        # 1
        for address, value in ((self._create_address('a'), b'1'),
                               (self._create_address('c'), b'5')):
            with self.assertRaises(context_manager.AuthorizationException):
                self.context_manager.set(
                    context_id=ctx,
                    address_value_list=[{address: value}])

        # 2
        for address in (self._create_address('a'),
                        self._create_address('c')):
            with self.assertRaises(context_manager.AuthorizationException):
                self.context_manager.get(
                    context_id=ctx,
                    address_list=[address])

    def test_exception_on_invalid_input(self):
        """Tests that invalid input namespaces raise a
        CreateContextException: invalid characters, or a namespace
        longer than 70 characters.

        Notes:
            1) A namespace containing a non-hex character.
            2) A 71-character namespace.
            3) A namespace with several invalid characters.
        """

        bad_input_1 = '0db7e8zc'  # invalid character
        bad_input_2 = '7ef84ed' * 10 + '5'  # too long, 71 chars
        bad_input_3 = 'yy76ftoph7465873ddde389f'  # invalid chars

        good_io_1 = 'd8f533bbb74443222daad4'
        good_io_2 = '77465847465784757848ddddddf'

        state_hash = self.context_manager.get_first_root()

        # Each case: (inputs, outputs) that must be rejected.
        cases = [
            ([bad_input_1, good_io_1], [good_io_2]),                 # 1
            ([good_io_1, bad_input_2], [good_io_2]),                 # 2
            ([bad_input_3, good_io_2], [good_io_2, good_io_1]),      # 3
        ]
        for inputs, outputs in cases:
            with self.assertRaises(context_manager.CreateContextException):
                self.context_manager.create_context(
                    state_hash=state_hash,
                    base_contexts=[],
                    inputs=inputs,
                    outputs=outputs)

    def test_exception_on_invalid_output(self):
        """Tests that invalid output namespaces raise a
        CreateContextException: an odd number of characters, a namespace
        longer than 70 characters, or invalid characters.

        Notes:
            1) A namespace with an odd number of characters.
            2) A 71-character namespace.
            3) A namespace with several invalid characters.
        """

        bad_output_1 = '0db7e87'  # Odd number of characters
        bad_output_2 = '7ef84ed' * 10 + '5'  # too long, 71 chars
        bad_output_3 = 'yy76ftoph7465873ddde389f'  # invalid chars

        good_io_1 = 'd8f533bbb74443222daad4'
        good_io_2 = '77465847465784757848ddddddff'

        state_hash = self.context_manager.get_first_root()

        # Each case: (inputs, outputs) that must be rejected.
        cases = [
            ([good_io_2, good_io_1], [bad_output_1]),                # 1
            ([good_io_1, good_io_2], [bad_output_2]),                # 2
            ([good_io_1, good_io_2], [good_io_2, bad_output_3]),     # 3
        ]
        for inputs, outputs in cases:
            with self.assertRaises(context_manager.CreateContextException):
                self.context_manager.create_context(
                    state_hash=state_hash,
                    base_contexts=[],
                    inputs=inputs,
                    outputs=outputs)

    def test_namespace_gets(self):
        """Tests that gets for an address under a namespace will return the
        correct value.

        Notes:
            1) Create ctx_1 and set 'b' to b'8'.
            2) squash the previous context creating state_hash_1.
            3) Create 2 contexts off of this state hash and assert
               that gets on these contexts retrieve the correct
               value for an address that is not fully specified in the inputs.
            4) Set values to addresses in these contexts.
            5) Create 1 context off of these prior 2 contexts and assert that
               gets from this context retrieve the correct values for
               addresses that are not fully specified in the inputs. 2 of the
               values are found in the chain of contexts, and 1 is not found
               and so must be retrieved from the merkle tree.
        """

        # 1
        ctx_1 = self.context_manager.create_context(
            state_hash=self.context_manager.get_first_root(),
            base_contexts=[],
            inputs=[self._create_address('a')],
            outputs=[self._create_address('b')])

        self.context_manager.set(
            context_id=ctx_1,
            address_value_list=[{self._create_address('b'): b'8'}])

        # 2
        squash = self.context_manager.get_squash_handler()
        state_hash_1 = squash(
            state_root=self.context_manager.get_first_root(),
            context_ids=[ctx_1],
            persist=True,
            clean_up=True)

        # 3
        ctx_1a = self.context_manager.create_context(
            state_hash=state_hash_1,
            base_contexts=[],
            inputs=[self._create_address('a')[:10]],
            outputs=[self._create_address('c')])
        # 'a' was never written, so the get falls through to the merkle
        # tree and yields None.
        self.assertEqual(
            self.context_manager.get(
                context_id=ctx_1a,
                address_list=[self._create_address('a')]),
            [(self._create_address('a'), None)])

        ctx_1b = self.context_manager.create_context(
            state_hash=state_hash_1,
            base_contexts=[],
            inputs=[self._create_address('b')[:6]],
            outputs=[self._create_address('z')])
        self.assertEqual(
            self.context_manager.get(
                context_id=ctx_1b,
                address_list=[self._create_address('b')]),
            [(self._create_address('b'), b'8')])

        # 4
        self.context_manager.set(
            context_id=ctx_1b,
            address_value_list=[{self._create_address('z'): b'2'}])

        self.context_manager.set(
            context_id=ctx_1a,
            address_value_list=[{self._create_address('c'): b'1'}]
        )

        ctx_2 = self.context_manager.create_context(
            state_hash=state_hash_1,
            base_contexts=[ctx_1a, ctx_1b],
            inputs=[self._create_address('z')[:10],
                    self._create_address('c')[:10],
                    self._create_address('b')[:10]],
            outputs=[self._create_address('w')])

        # 'z' and 'c' come from the base contexts; 'b' from the tree.
        self.assertEqual(
            self.context_manager.get(
                context_id=ctx_2,
                address_list=[self._create_address('z'),
                              self._create_address('c'),
                              self._create_address('b')]),
            [(self._create_address('z'), b'2'),
             (self._create_address('c'), b'1'),
             (self._create_address('b'), b'8')])

    def test_create_context_with_prior_state(self):
        """Tests context creation with prior state from base contexts.

        Notes:
            Set up the context:
                Create 3 prior contexts, each with addresses set in them,
                then create 1 new context based on those three.
            Test:
                Get addresses that come from the prior contexts' state and
                assert the values are correct.
        """
        context_id = self._setup_context()

        expected = [(self._create_address(name), value)
                    for name, value in (('llaa', b'1'),
                                        ('yyyy', b'11'),
                                        ('tttt', b'12'),
                                        ('zzoo', b'27'))]
        actual = self.context_manager.get(
            context_id,
            [self._create_address(name)
             for name in ('llaa', 'yyyy', 'tttt', 'zzoo')])
        self.assertEqual(actual, expected)

    def test_squash(self):
        """Tests that squashing a context based on state from other
        contexts will result in the same merkle hash as updating the
        merkle tree with the same data.

        Notes:
            Set up the context

            Test:
                1) Make set calls on several of the addresses.
                2) Squash the context to get a new state hash.
                3) Apply all of the aggregate sets from all
                of the contexts, to another database with a merkle tree.
                4) Assert that the state hashes are the same.
                5) Assert that the state deltas have been stored
        """
        # 1)
        context_id = self._setup_context()
        self.context_manager.set(
            context_id,
            [{self._create_address(a): v} for a, v in
             [('yyyy', b'2'),
              ('tttt', b'4')]])

        # 2)
        squash = self.context_manager.get_squash_handler()
        resulting_state_hash = squash(self.first_state_hash, [context_id],
                                      persist=True, clean_up=True)

        # 3) The aggregate writes of all contexts; 'yyyy' and 'tttt'
        # reflect the overriding sets from step 1.
        final_state_to_update = {self._create_address(a): v for a, v in
                                 [('llaa', b'1'),
                                  ('aall', b'2'),
                                  ('nnnn', b'3'),
                                  ('zzzz', b'9'),
                                  ('yyyy', b'2'),
                                  ('tttt', b'4'),
                                  ('qqqq', b'13'),
                                  ('oooo', b'25'),
                                  ('oozz', b'26'),
                                  ('zzoo', b'27'),
                                  ('ppoo', b'28'),
                                  ('aeio', b'29')]}

        test_merkle_tree = MerkleDatabase(self.database_results)
        test_resulting_state_hash = test_merkle_tree.update(
            final_state_to_update, virtual=False)
        # 4)
        self.assertEqual(resulting_state_hash, test_resulting_state_hash)
        state_changes = self.state_delta_store.get_state_deltas(
            resulting_state_hash)

        # 5) assertIn gives a clearer failure message than
        # assertTrue(x in y).
        for addr, value in final_state_to_update.items():
            expected_state_change = StateChange(
                address=addr,
                value=value,
                type=StateChange.SET)

            self.assertIn(expected_state_change, state_changes)

    def test_squash_no_updates(self):
        """Tests that squashing a context that has no state updates will return
           the starting state root hash.

        Notes:
            Set up the context

            Test:
                1) Squash the context.
                2) Assert that the state hash is the same as the starting
                hash.
                3) Assert that the state deltas have not been overwritten
        """
        # Pre-existing deltas at the starting root; they must survive the
        # no-op squash.
        self.state_delta_store.save_state_deltas(
            self.first_state_hash,
            [StateChange(address='aaa', value=b'xyz', type=StateChange.SET)])

        context_id = self.context_manager.create_context(
            state_hash=self.first_state_hash,
            base_contexts=[],
            inputs=[],
            outputs=[])
        # 1)
        squash = self.context_manager.get_squash_handler()
        resulting_state_hash = squash(self.first_state_hash, [context_id],
                                      persist=True, clean_up=True)
        # 2
        self.assertIsNotNone(resulting_state_hash)
        self.assertEqual(resulting_state_hash, self.first_state_hash)

        # 3
        changes = self.state_delta_store.get_state_deltas(resulting_state_hash)

        self.assertEqual(
            [StateChange(address='aaa', value=b'xyz', type=StateChange.SET)],
            list(changes))

    def test_reads_from_context_w_several_writes(self):
        """Tests that those context values that have been written to the
        Merkle tree, or that have been set to a base_context, will have the
        correct value at the address for a given context.

                                               ->context_id_a1
                                               |              |
                                               |              |
                                               |              |
        sh0-->context_id1-->sh1-->context_a-----              -->context_id_b
                                               |              |
                                               |              |
                                               |              |
                                               |              |
                                               -->context_id_a2

        Notes:

            Test:
                1. From a Merkle Tree with only the root node in it, create a
                   context and set several values, and then squash that context
                   upon the first state hash.
                2. Create a context with no base context, based on
                   the merkle root computed from the first squash.
                   Assert that gets from this context will provide
                   values that were set in the first context.
                3. Write to all of the available outputs
                4. Create a new context based on context_a, from #2,
                5. Assert that gets from this context equal the values set
                   to Context A.
                6. Create a new context based on context_a and set values to
                   this context.
                7. Create a new context based on the 2 contexts made in 4 and 6
                8. From this context assert that gets equal the correct values
                   set in the prior contexts.
        """

        squash = self.context_manager.get_squash_handler()
        test_addresses = self._create_txn_inputs_outputs()
        # 1)
        context_id1 = self.context_manager.create_context(
            state_hash=self.first_state_hash,
            inputs=test_addresses.inputs,
            outputs=test_addresses.outputs,
            base_contexts=[])

        # NOTE(review): bytes(i) produces i NUL bytes (b'' for i == 0),
        # not the digit i — distinct values per address, presumably
        # intentional; confirm.
        values1 = [bytes(i) for i in range(len(test_addresses.writes))]
        self.context_manager.set(
            context_id1,
            [{a: v} for a, v in zip(test_addresses.writes, values1)])
        sh1 = squash(
            state_root=self.first_state_hash,
            context_ids=[context_id1],
            persist=True,
            clean_up=True)
        # 2)
        context_a = self.context_manager.create_context(
            state_hash=sh1,
            inputs=test_addresses.writes,  # read from every address written to
            outputs=test_addresses.outputs,
            base_contexts=[]
        )

        address_values = self.context_manager.get(
            context_a,
            list(test_addresses.writes)
        )
        self.assertEqual(
            address_values,
            [(a, v) for a, v in zip(test_addresses.writes, values1)]
        )

        # 3)
        values2 = [v.encode() for v in test_addresses.outputs]
        self.context_manager.set(
            context_id=context_a,
            address_value_list=[{a: v} for
                                a, v in zip(test_addresses.outputs, values2)])

        # 4)
        context_id_a1 = self.context_manager.create_context(
            state_hash=sh1,
            inputs=test_addresses.outputs,
            outputs=test_addresses.outputs,
            base_contexts=[context_a]
        )
        # 5)
        c_ida1_address_values = self.context_manager.get(
            context_id=context_id_a1,
            address_list=list(test_addresses.outputs)
        )
        self.assertEqual(
            c_ida1_address_values,
            [(a, v) for a, v in zip(test_addresses.outputs, values2)]
        )

        # 6)
        test_addresses2 = self._create_txn_inputs_outputs(80)
        context_id_a2 = self.context_manager.create_context(
            state_hash=sh1,
            inputs=test_addresses2.inputs,
            outputs=test_addresses2.outputs,
            base_contexts=[context_a]
        )
        values3 = [v.encode() for v in test_addresses2.writes]
        self.context_manager.set(
            context_id=context_id_a2,
            address_value_list=[{a: v} for
                                a, v in zip(test_addresses2.writes, values3)],
        )
        # 7)
        context_id_b = self.context_manager.create_context(
            state_hash=sh1,
            inputs=test_addresses2.writes + test_addresses.outputs,
            outputs=[],
            base_contexts=[context_id_a1, context_id_a2]
        )

        # 8)
        self.assertEqual(
            self.context_manager.get(
                context_id_b,
                list(test_addresses2.writes + test_addresses.outputs)
            ),
            [(a, v) for a, v in zip(
                test_addresses2.writes + test_addresses.outputs,
                values3 + values2)]
        )

    def test_state_root_after_parallel_ctx(self):
        """Tests that the correct state root is calculated after basing one
        context off of multiple contexts.

                              i=abcd
                              o=aaaa
                           +>context_1+
                           |  aaaa=1  |
                           |          |
               i=llll      |   i=bacd |      i=bbbb,aaaa
               o=llll      |   o=bbbb |      o=cccc,llll
        sh0--->ctx_0-->sh1>|-->context_2-+---->context_n---->sh2
               llll=5      |   bbbb=2 |      cccc=4
                           |          |      llll=8
                           |   i=abcd |
                           |   o=cccc |
                           +>context_3+
                               cccc=3

        Notes:
            Test:
                1. Create a context, set a value in it and squash it into a new
                   state hash.
                2. Create 3 contexts based off of the state root from #1.
                3. Set values at addresses to all three contexts.
                4. Base another context off of the contexts from #2.
                5. Set a value to an address in this context that has already
                   been set to in the non-base context.
                6. Squash the contexts producing a state hash and assert
                   that it equals a state hash obtained by manually updating
                   the merkle tree.
        """

        sh0 = self.first_state_hash
        # 1)
        squash = self.context_manager.get_squash_handler()
        ctx_1 = self.context_manager.create_context(
            state_hash=sh0,
            base_contexts=[],
            inputs=[self._create_address('llll')],
            outputs=[self._create_address('llll')]
        )
        self.context_manager.set(
            context_id=ctx_1,
            address_value_list=[{self._create_address('llll'): b'5'}]
        )

        sh1 = squash(
            state_root=sh0,
            context_ids=[ctx_1],
            persist=True,
            clean_up=True)

        # 2)
        context_1 = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[],
            inputs=[self._create_address('abcd')],
            outputs=[self._create_address('aaaa')]
        )
        context_2 = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[],
            inputs=[self._create_address('bacd')],
            outputs=[self._create_address('bbbb')]
        )
        context_3 = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[],
            inputs=[self._create_address('abcd')],
            outputs=[self._create_address('cccc'),
                     self._create_address('dddd')]
        )

        # 3)
        self.context_manager.set(
            context_id=context_1,
            address_value_list=[{self._create_address('aaaa'): b'1'}]
        )
        self.context_manager.set(
            context_id=context_2,
            address_value_list=[{self._create_address('bbbb'): b'2'}]
        )
        self.context_manager.set(
            context_id=context_3,
            address_value_list=[{self._create_address('cccc'): b'3'}]
        )

        # 4)
        context_n = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[context_1, context_2, context_3],
            inputs=[self._create_address('bbbb'), self._create_address('aaaa')],
            outputs=[self._create_address('cccc'), self._create_address('llll')]
        )

        # 5) 'cccc' overrides the value set in context_3.
        self.context_manager.set(
            context_id=context_n,
            address_value_list=[{self._create_address('cccc'): b'4',
                                 self._create_address('llll'): b'8'}]
        )

        # 6)
        cm_state_root = squash(
            state_root=sh1,
            context_ids=[context_n],
            persist=False,
            clean_up=True)

        tree = MerkleDatabase(self.database_results)
        calc_state_root = tree.update({self._create_address('aaaa'): b'1',
                                       self._create_address('bbbb'): b'2',
                                       self._create_address('cccc'): b'4',
                                       self._create_address('llll'): b'8'})
        self.assertEqual(calc_state_root, cm_state_root)

    def test_complex_basecontext_squash(self):
        """Tests complex context basing and squashing.
                                            i=qq,dd dd=0
                                            o=dd,pp pp=1
                                i=cc,aa  +->context_3_2a_1+|
                                o=dd,ll  |                 |
               i=aa,ab      +->context_2a|  i=aa    aa=0   |
               o=cc,ab      |   dd=10    |  o=aa,ll ll=1   |
        sh0->context_1-->sh1|   ll=11    +->context_3_2a_2+|->sh1
               cc=0         |   i=cc,aa  +->context_3_2b_1+|
               ab=1         |   o=nn,mm  |  i=nn,ba mm=0   |
                            +->context_2b|  o=mm,ba ba=1   |
                                nn=0     |                 |
                                mm=1     +->context_3_2b_2+|
                                            i=nn,oo ab=0
                                            o=ab,oo oo=1

        Notes:
            Test:
                1. Create a context off of the first state hash, set
                   addresses in it, and squash that context, getting a new
                   merkle root.
                2. Create 2 contexts with the context in # 1 as the base, and
                   for each of these contexts set addresses to values where the
                   outputs for each are disjoint.
                3. For each of these 2 contexts create 2 more contexts each
                   having one of the contexts in # 2 as the base context, and
                   set addresses to values.
                4. Squash the 4 contexts from #3 and assert the state hash
                   is equal to a manually computed state hash.
        """

        squash = self.context_manager.get_squash_handler()
        # 1)
        inputs_1 = [self._create_address('aa'),
                    self._create_address('ab')]
        outputs_1 = [self._create_address('cc'),
                     self._create_address('ab')]
        context_1 = self.context_manager.create_context(
            state_hash=self.first_state_hash,
            base_contexts=[],
            inputs=inputs_1,
            outputs=outputs_1)
        # bytes(i) yields i zero-bytes (b'', b'\x00', ...); the same encoding
        # is used in the manual merkle update below, so the hashes line up.
        self.context_manager.set(
            context_id=context_1,
            address_value_list=[{a: v} for a, v in zip(
                outputs_1, [bytes(i) for i in range(len(outputs_1))])])

        sh1 = squash(
            state_root=self.first_state_hash,
            context_ids=[context_1],
            persist=True,
            clean_up=True)

        # 2)
        inputs_2a = [self._create_address('cc'),
                     self._create_address('aa')]
        outputs_2a = [self._create_address('dd'),
                      self._create_address('ll')]
        # NOTE(review): context_2a is based on self.first_state_hash while
        # context_2b below uses sh1 -- confirm this asymmetry is intentional.
        context_2a = self.context_manager.create_context(
            state_hash=self.first_state_hash,
            base_contexts=[],
            inputs=inputs_2a,
            outputs=outputs_2a)

        inputs_2b = [self._create_address('cc'),
                     self._create_address('aa')]
        outputs_2b = [self._create_address('nn'),
                      self._create_address('mm')]
        context_2b = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[],
            inputs=inputs_2b,
            outputs=outputs_2b)

        self.context_manager.set(
            context_id=context_2a,
            address_value_list=[{a: bytes(v)}
                                for a, v in zip(outputs_2a,
                                                range(10,
                                                      10 + len(outputs_2a)))]
        )
        self.context_manager.set(
            context_id=context_2b,
            address_value_list=[{a: bytes(v)}
                                for a, v in zip(outputs_2b,
                                                range(len(outputs_2b)))]
        )

        # 3)
        inputs_3_2a_1 = [self._create_address('qq'),
                         self._create_address('dd')]
        outputs_3_2a_1 = [self._create_address('dd'),
                          self._create_address('pp')]
        context_3_2a_1 = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[context_2a],
            inputs=inputs_3_2a_1,
            outputs=outputs_3_2a_1
        )
        inputs_3_2a_2 = [self._create_address('aa')]
        outputs_3_2a_2 = [self._create_address('aa'),
                          self._create_address('ll')]
        context_3_2a_2 = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[context_2a],
            inputs=inputs_3_2a_2,
            outputs=outputs_3_2a_2)

        inputs_3_2b_1 = [self._create_address('nn'),
                         self._create_address('ab')]
        outputs_3_2b_1 = [self._create_address('mm'),
                          self._create_address('ba')]
        context_3_2b_1 = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[context_2b],
            inputs=inputs_3_2b_1,
            outputs=outputs_3_2b_1)

        inputs_3_2b_2 = [self._create_address('nn'),
                         self._create_address('oo')]
        outputs_3_2b_2 = [self._create_address('ab'),
                          self._create_address('oo')]
        context_3_2b_2 = self.context_manager.create_context(
            state_hash=sh1,
            base_contexts=[context_2b],
            inputs=inputs_3_2b_2,
            outputs=outputs_3_2b_2)

        self.context_manager.set(
            context_id=context_3_2a_1,
            address_value_list=[{a: bytes(v)}
                                for a, v in zip(outputs_3_2a_1,
                                                range(len(outputs_3_2a_1)))])
        self.context_manager.set(
            context_id=context_3_2a_2,
            address_value_list=[{a: bytes(v)}
                                for a, v in zip(outputs_3_2a_2,
                                                range(len(outputs_3_2a_2)))])
        self.context_manager.set(
            context_id=context_3_2b_1,
            address_value_list=[{a: bytes(v)}
                                for a, v in zip(outputs_3_2b_1,
                                                range(len(outputs_3_2b_1)))])
        self.context_manager.set(
            context_id=context_3_2b_2,
            address_value_list=[{a: bytes(v)}
                                for a, v in zip(outputs_3_2b_2,
                                                range(len(outputs_3_2b_2)))])

        # 4)
        sh2 = squash(
            state_root=sh1,
            context_ids=[context_3_2a_1, context_3_2a_2,
                         context_3_2b_1, context_3_2b_2],
            persist=False,
            clean_up=True)

        # Manually replicate the merkle updates and compare roots.
        # assertEqual replaces the deprecated assertEquals alias (removed
        # in Python 3.12).
        tree = MerkleDatabase(self.database_results)
        state_hash_from_1 = tree.update(
            set_items={a: v for a, v in zip(outputs_1,
                                        [bytes(i)
                                         for i in range(len(outputs_1))])},
                                        virtual=False)
        self.assertEqual(state_hash_from_1, sh1,
                         "The manually calculated state hash from the first "
                         "context and the one calculated by squashing that "
                         "state hash should be the same")
        tree.set_merkle_root(state_hash_from_1)
        test_sh2 = tree.update(set_items={self._create_address('aa'): bytes(0),
                                          self._create_address('ab'): bytes(0),
                                          self._create_address('ba'): bytes(1),
                                          self._create_address('dd'): bytes(0),
                                          self._create_address('ll'): bytes(1),
                                          self._create_address('mm'): bytes(0),
                                          self._create_address('oo'): bytes(1),
                                          self._create_address('pp'): bytes(1),
                                          self._create_address('nn'): bytes(0),
                                          self._create_address('cc'): bytes(0)})

        self.assertEqual(sh2, test_sh2, "Manually calculated and context "
                                        "manager calculated merkle hashes "
                                        "are the same")

    @unittest.skip("Necessary to catch scheduler bugs--Depth-first search")
    def test_check_for_bad_combination(self):
        """Tests that the context manager will raise
        an exception if asked to combine contexts, either via base contexts 
        in create_context or via squash that shouldn't be
        combined because they share addresses that can't be determined by the
        scheduler to not have been parallel. This is a check on scheduler bugs.

        Examples where the context manager should raise an exception on
        duplicate addresses:
        1. Success
              i=a
              o=b
           +>ctx_1+
        dup|->b=3 |
           |      |
        sh0|      ----->state hash or context
           |  i=q |
           |  o=b |
           +>ctx_2+
        dup-->b=2
        2.
                      i=b
                      o=d
                   +>ctx_1a_1+
                   |  d=4    |
             i=a   |         |
             o=b   |         |
           +>ctx_1a|         |
           | b=2   |  i=b    |
           |       |  o=c    |
        sh0|       +>ctx_1a_2|
           |     dup-> c=7   |------>state hash or context
           |  i=a  +>ctx_1b_1|
           |  o=c  |         |
           +>ctx_1b|         |
        dup-->c=5  |  i=t    |
                   |  o=p    |
                   +>ctx_1b_2+
                      p=8

        3.
                      i=b
                      o=d
                   +>ctx_1a_1+
                   |  d=4    |   i=d,c
             i=a   |         |   o=n
             o=b   |         <>ctx_3a+
           +>ctx_1a|         |   n=5 |
           | b=2   |  i=b    |       |
           |       |  o=c    |       |
        sh0|       +>ctx_1a_2+       <----->state hash or context
           |   dup--> c=7            |
           |  i=a  +>ctx_1b_1+       |
           |  o=c  |         |  i=c  |
           +>ctx_1b|         |  o=q  |
              c=5  |  i=c    <>ctx_3b+
                   |  o=c    |  q=5
                   +>ctx_1b_2+
               dup--> c=1

        """

        # 1.
        squash = self.context_manager.get_squash_handler()
        sh0 = self.first_state_hash
        inputs_1 = [self._create_address('a')]
        outputs_1 = [self._create_address('b')]
        ctx_1 = self.context_manager.create_context(
            state_hash=sh0,
            base_contexts=[],
            inputs=inputs_1,
            outputs=outputs_1
        )
        self.context_manager.set(
            context_id=ctx_1,
            address_value_list=[{self._create_address('b'): b'3'}]
        )

        inputs_2 = [self._create_address('q')]
        outputs_2 = [self._create_address('b')]
        ctx_2 = self.context_manager.create_context(
            state_hash=sh0,
            base_contexts=[],
            inputs=inputs_2,
            outputs=outputs_2
        )
        self.context_manager.set(
            context_id=ctx_2,
            address_value_list=[{self._create_address('b'): b'2'}]
        )

        # Both contexts wrote the duplicate address 'b', so squashing them
        # together must raise.  The previous try/except formulation swallowed
        # the AssertionError raised by self.fail() (AssertionError is an
        # Exception subclass), so the test could never fail; assertRaises
        # expresses the expectation without that hazard.
        with self.assertRaises(Exception):
            squash(
                state_root=sh0,
                context_ids=[ctx_1, ctx_2],
                persist=True,
                clean_up=True)
예제 #16
0
    def __init__(self, network_endpoint, component_endpoint, public_uri,
                 peering, join_list, peer_list, data_dir, config_dir,
                 identity_signing_key):
        """Constructs a validator instance.

        Args:
            network_endpoint (str): the network endpoint
            component_endpoint (str): the component endpoint
            public_uri (str): the zmq-style URI of this validator's
                publically reachable endpoint
            peering (str): The type of peering approach. Either 'static'
                or 'dynamic'. In 'static' mode, no attempted topology
                buildout occurs -- the validator only attempts to initiate
                peering connections with endpoints specified in the
                peer_list. In 'dynamic' mode, the validator will first
                attempt to initiate peering connections with endpoints
                specified in the peer_list and then attempt to do a
                topology buildout starting with peer lists obtained from
                endpoints in the join_list. In either mode, the validator
                will accept incoming peer requests up to max_peers.
            join_list (list of str): a list of addresses to connect
                to in order to perform the initial topology buildout
            peer_list (list of str): a list of peer addresses
            data_dir (str): path to the data directory
            config_dir (str): path to the config directory
            identity_signing_key (str): key validator uses for signing
        """
        # -- Global state (merkle) database -- #
        # The last two characters of network_endpoint (presumably the port
        # suffix -- TODO confirm) distinguish this validator's LMDB files.
        db_filename = os.path.join(
            data_dir, 'merkle-{}.lmdb'.format(network_endpoint[-2:]))
        LOGGER.debug('database file is %s', db_filename)

        merkle_db = LMDBNoLockDatabase(db_filename, 'c')

        # -- State delta store -- #
        delta_db_filename = os.path.join(
            data_dir, 'state-deltas-{}.lmdb'.format(network_endpoint[-2:]))
        LOGGER.debug('state delta store file is %s', delta_db_filename)
        state_delta_db = LMDBNoLockDatabase(delta_db_filename, 'c')

        state_delta_store = StateDeltaStore(state_delta_db)

        context_manager = ContextManager(merkle_db, state_delta_store)
        self._context_manager = context_manager

        state_view_factory = StateViewFactory(merkle_db)

        # -- Block store -- #
        block_db_filename = os.path.join(
            data_dir, 'block-{}.lmdb'.format(network_endpoint[-2:]))
        LOGGER.debug('block store file is %s', block_db_filename)

        block_db = LMDBNoLockDatabase(block_db_filename, 'c')
        block_store = BlockStore(block_db)

        # setup network
        self._dispatcher = Dispatcher()

        # Pools shared by the component-side handlers registered below.
        thread_pool = ThreadPoolExecutor(max_workers=10)
        process_pool = ProcessPoolExecutor(max_workers=3)

        self._thread_pool = thread_pool
        self._process_pool = process_pool

        self._service = Interconnect(component_endpoint,
                                     self._dispatcher,
                                     secured=False,
                                     heartbeat=False,
                                     max_incoming_connections=20)

        executor = TransactionExecutor(service=self._service,
                                       context_manager=context_manager,
                                       config_view_factory=ConfigViewFactory(
                                           StateViewFactory(merkle_db)))
        self._executor = executor

        state_delta_processor = StateDeltaProcessor(self._service,
                                                    state_delta_store,
                                                    block_store)

        # ZMQ identity derived from the current time (truncated sha512 hex).
        zmq_identity = hashlib.sha512(
            time.time().hex().encode()).hexdigest()[:23]

        network_thread_pool = ThreadPoolExecutor(max_workers=10)
        self._network_thread_pool = network_thread_pool

        self._network_dispatcher = Dispatcher()

        # Server public and private keys are hardcoded here due to
        # the decision to avoid having separate identities for each
        # validator's server socket. This is appropriate for a public
        # network. For a permissioned network with requirements for
        # server endpoint authentication at the network level, this can
        # be augmented with a local lookup service for side-band provided
        # endpoint, public_key pairs and a local configuration option
        # for 'server' side private keys.
        self._network = Interconnect(
            network_endpoint,
            dispatcher=self._network_dispatcher,
            zmq_identity=zmq_identity,
            secured=True,
            server_public_key=b'wFMwoOt>yFqI/ek.G[tfMMILHWw#vXB[Sv}>l>i)',
            server_private_key=b'r&oJ5aQDj4+V]p2:Lz70Eu0x#m%IwzBdP(}&hWM*',
            heartbeat=True,
            public_uri=public_uri,
            connection_timeout=30,
            max_incoming_connections=100)

        self._gossip = Gossip(self._network,
                              public_uri=public_uri,
                              peering_mode=peering,
                              initial_join_endpoints=join_list,
                              initial_peer_endpoints=peer_list,
                              minimum_peer_connectivity=3,
                              maximum_peer_connectivity=10,
                              topology_check_frequency=1)

        # -- Journal wiring: completer, senders, journal, genesis -- #
        completer = Completer(block_store, self._gossip)

        block_sender = BroadcastBlockSender(completer, self._gossip)
        batch_sender = BroadcastBatchSender(completer, self._gossip)
        chain_id_manager = ChainIdManager(data_dir)
        # Create and configure journal
        self._journal = Journal(
            block_store=block_store,
            state_view_factory=StateViewFactory(merkle_db),
            block_sender=block_sender,
            batch_sender=batch_sender,
            transaction_executor=executor,
            squash_handler=context_manager.get_squash_handler(),
            identity_signing_key=identity_signing_key,
            chain_id_manager=chain_id_manager,
            state_delta_processor=state_delta_processor,
            data_dir=data_dir,
            config_dir=config_dir,
            check_publish_block_frequency=0.1,
            block_cache_purge_frequency=30,
            block_cache_keep_time=300)

        self._genesis_controller = GenesisController(
            context_manager=context_manager,
            transaction_executor=executor,
            completer=completer,
            block_store=block_store,
            state_view_factory=state_view_factory,
            identity_key=identity_signing_key,
            data_dir=data_dir,
            config_dir=config_dir,
            chain_id_manager=chain_id_manager,
            batch_sender=batch_sender)

        responder = Responder(completer)

        # Completed batches/blocks are handed to the journal.
        completer.set_on_batch_received(self._journal.on_batch_received)
        completer.set_on_block_received(self._journal.on_block_received)

        # -- Transaction-processor (component) handlers -- #
        self._dispatcher.add_handler(
            validator_pb2.Message.TP_STATE_GET_REQUEST,
            tp_state_handlers.TpStateGetHandler(context_manager), thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.TP_STATE_SET_REQUEST,
            tp_state_handlers.TpStateSetHandler(context_manager), thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.TP_REGISTER_REQUEST,
            processor_handlers.ProcessorRegisterHandler(executor.processors),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.TP_UNREGISTER_REQUEST,
            processor_handlers.ProcessorUnRegisterHandler(executor.processors),
            thread_pool)

        # Set up base network handlers
        self._network_dispatcher.add_handler(
            validator_pb2.Message.NETWORK_PING, PingHandler(),
            network_thread_pool)

        self._network_dispatcher.add_handler(
            validator_pb2.Message.NETWORK_CONNECT,
            ConnectHandler(network=self._network), network_thread_pool)

        self._network_dispatcher.add_handler(
            validator_pb2.Message.NETWORK_DISCONNECT,
            DisconnectHandler(network=self._network), network_thread_pool)

        # Set up gossip handlers
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_GET_PEERS_REQUEST,
            GetPeersRequestHandler(gossip=self._gossip), network_thread_pool)

        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_GET_PEERS_RESPONSE,
            GetPeersResponseHandler(gossip=self._gossip), network_thread_pool)

        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_REGISTER,
            PeerRegisterHandler(gossip=self._gossip), network_thread_pool)

        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_UNREGISTER,
            PeerUnregisterHandler(gossip=self._gossip), network_thread_pool)

        # GOSSIP_MESSAGE 1) Sends acknowledgement to the sender
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_MESSAGE, GossipMessageHandler(),
            network_thread_pool)

        # GOSSIP_MESSAGE 2) Verifies signature
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_MESSAGE,
            signature_verifier.GossipMessageSignatureVerifier(), process_pool)

        # GOSSIP_MESSAGE 3) Determines if we should broadcast the
        # message to our peers. It is important that this occur prior
        # to the sending of the message to the completer, as this step
        # relies on whether the  gossip message has previously been
        # seen by the validator to determine whether or not forwarding
        # should occur
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_MESSAGE,
            GossipBroadcastHandler(gossip=self._gossip, completer=completer),
            network_thread_pool)

        # GOSSIP_MESSAGE 4) Send message to completer
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_MESSAGE,
            CompleterGossipHandler(completer), network_thread_pool)

        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BLOCK_REQUEST,
            BlockResponderHandler(responder, self._gossip),
            network_thread_pool)

        # GOSSIP_BLOCK_RESPONSE 1) Sends ack to the sender
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BLOCK_RESPONSE,
            GossipBlockResponseHandler(), network_thread_pool)

        # GOSSIP_BLOCK_RESPONSE 2) Verifies signature
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BLOCK_RESPONSE,
            signature_verifier.GossipBlockResponseSignatureVerifier(),
            process_pool)

        # GOSSIP_BLOCK_RESPONSE 3) Send message to completer
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BLOCK_RESPONSE,
            CompleterGossipBlockResponseHandler(completer),
            network_thread_pool)

        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BLOCK_RESPONSE,
            ResponderBlockResponseHandler(responder, self._gossip),
            network_thread_pool)

        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BATCH_BY_BATCH_ID_REQUEST,
            BatchByBatchIdResponderHandler(responder, self._gossip),
            network_thread_pool)

        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BATCH_BY_TRANSACTION_ID_REQUEST,
            BatchByTransactionIdResponderHandler(responder, self._gossip),
            network_thread_pool)

        # GOSSIP_BATCH_RESPONSE 1) Sends ack to the sender
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BATCH_RESPONSE,
            GossipBatchResponseHandler(), network_thread_pool)

        # GOSSIP_BATCH_RESPONSE 2) Verifies signature
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BATCH_RESPONSE,
            signature_verifier.GossipBatchResponseSignatureVerifier(),
            process_pool)

        # GOSSIP_BATCH_RESPONSE 3) Send message to completer
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BATCH_RESPONSE,
            CompleterGossipBatchResponseHandler(completer),
            network_thread_pool)

        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BATCH_RESPONSE,
            ResponderBatchResponseHandler(responder, self._gossip),
            network_thread_pool)

        # -- Client request handlers (component dispatcher) -- #
        # Batch submission: verify signatures, broadcast, then finish.
        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_BATCH_SUBMIT_REQUEST,
            signature_verifier.BatchListSignatureVerifier(), process_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_BATCH_SUBMIT_REQUEST,
            CompleterBatchListBroadcastHandler(completer, self._gossip),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_BATCH_SUBMIT_REQUEST,
            client_handlers.BatchSubmitFinisher(
                self._journal.get_block_store(), completer.batch_cache),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_BATCH_STATUS_REQUEST,
            client_handlers.BatchStatusRequest(self._journal.get_block_store(),
                                               completer.batch_cache),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_STATE_LIST_REQUEST,
            client_handlers.StateListRequest(merkle_db,
                                             self._journal.get_block_store()),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_STATE_GET_REQUEST,
            client_handlers.StateGetRequest(merkle_db,
                                            self._journal.get_block_store()),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_BLOCK_LIST_REQUEST,
            client_handlers.BlockListRequest(self._journal.get_block_store()),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_BLOCK_GET_REQUEST,
            client_handlers.BlockGetRequest(self._journal.get_block_store()),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_BATCH_LIST_REQUEST,
            client_handlers.BatchListRequest(self._journal.get_block_store()),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_BATCH_GET_REQUEST,
            client_handlers.BatchGetRequest(self._journal.get_block_store()),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_TRANSACTION_LIST_REQUEST,
            client_handlers.TransactionListRequest(
                self._journal.get_block_store()), thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_TRANSACTION_GET_REQUEST,
            client_handlers.TransactionGetRequest(
                self._journal.get_block_store()), thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_STATE_CURRENT_REQUEST,
            client_handlers.StateCurrentRequest(
                self._journal.get_current_root), thread_pool)

        # State Delta Subscription Handlers
        self._dispatcher.add_handler(
            validator_pb2.Message.STATE_DELTA_SUBSCRIBE_REQUEST,
            StateDeltaSubscriberValidationHandler(state_delta_processor),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.STATE_DELTA_SUBSCRIBE_REQUEST,
            StateDeltaAddSubscriberHandler(state_delta_processor), thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.STATE_DELTA_UNSUBSCRIBE_REQUEST,
            StateDeltaUnsubscriberHandler(state_delta_processor), thread_pool)
예제 #17
0
    def __init__(self,
                 bind_network,
                 bind_component,
                 endpoint,
                 peering,
                 seeds_list,
                 peer_list,
                 data_dir,
                 config_dir,
                 identity_signing_key,
                 scheduler_type,
                 permissions,
                 network_public_key=None,
                 network_private_key=None,
                 roles=None,
                 metrics_registry=None):
        """Constructs a validator instance.

        Args:
            bind_network (str): the network endpoint
            bind_component (str): the component endpoint
            endpoint (str): the zmq-style URI of this validator's
                publically reachable endpoint
            peering (str): The type of peering approach. Either 'static'
                or 'dynamic'. In 'static' mode, no attempted topology
                buildout occurs -- the validator only attempts to initiate
                peering connections with endpoints specified in the
                peer_list. In 'dynamic' mode, the validator will first
                attempt to initiate peering connections with endpoints
                specified in the peer_list and then attempt to do a
                topology buildout starting with peer lists obtained from
                endpoints in the seeds_list. In either mode, the validator
                will accept incoming peer requests up to max_peers.
            seeds_list (list of str): a list of addresses to connect
                to in order to perform the initial topology buildout
            peer_list (list of str): a list of peer addresses
            data_dir (str): path to the data directory
            config_dir (str): path to the config directory
            identity_signing_key (str): key validator uses for signing
        """

        # -- Setup Global State Database and Factory -- #
        global_state_db_filename = os.path.join(
            data_dir, 'merkle-{}.lmdb'.format(bind_network[-2:]))
        LOGGER.debug('global state database file is %s',
                     global_state_db_filename)
        global_state_db = LMDBNoLockDatabase(global_state_db_filename, 'c')
        state_view_factory = StateViewFactory(global_state_db)

        # -- Setup State Delta Store -- #
        delta_db_filename = os.path.join(
            data_dir, 'state-deltas-{}.lmdb'.format(bind_network[-2:]))
        LOGGER.debug('state delta store file is %s', delta_db_filename)
        state_delta_db = LMDBNoLockDatabase(delta_db_filename, 'c')
        state_delta_store = StateDeltaStore(state_delta_db)

        # -- Setup Receipt Store -- #
        receipt_db_filename = os.path.join(
            data_dir, 'txn_receipts-{}.lmdb'.format(bind_network[-2:]))
        LOGGER.debug('txn receipt store file is %s', receipt_db_filename)
        receipt_db = LMDBNoLockDatabase(receipt_db_filename, 'c')
        receipt_store = TransactionReceiptStore(receipt_db)

        # -- Setup Block Store -- #
        block_db_filename = os.path.join(
            data_dir, 'block-{}.lmdb'.format(bind_network[-2:]))
        LOGGER.debug('block store file is %s', block_db_filename)
        block_db = LMDBNoLockDatabase(block_db_filename, 'c')
        block_store = BlockStore(block_db)
        block_cache = BlockCache(block_store,
                                 keep_time=300,
                                 purge_frequency=30)

        # -- Setup Thread Pools -- #
        component_thread_pool = InstrumentedThreadPoolExecutor(
            max_workers=10, name='Component')
        network_thread_pool = InstrumentedThreadPoolExecutor(max_workers=10,
                                                             name='Network')
        sig_pool = InstrumentedThreadPoolExecutor(max_workers=3,
                                                  name='Signature')

        # -- Setup Dispatchers -- #
        component_dispatcher = Dispatcher(metrics_registry=metrics_registry)
        network_dispatcher = Dispatcher(metrics_registry=metrics_registry)

        # -- Setup Services -- #
        component_service = Interconnect(bind_component,
                                         component_dispatcher,
                                         secured=False,
                                         heartbeat=False,
                                         max_incoming_connections=20,
                                         monitor=True,
                                         max_future_callback_workers=10,
                                         metrics_registry=metrics_registry)

        zmq_identity = hashlib.sha512(
            time.time().hex().encode()).hexdigest()[:23]

        secure = False
        if network_public_key is not None and network_private_key is not None:
            secure = True

        network_service = Interconnect(
            bind_network,
            dispatcher=network_dispatcher,
            zmq_identity=zmq_identity,
            secured=secure,
            server_public_key=network_public_key,
            server_private_key=network_private_key,
            heartbeat=True,
            public_endpoint=endpoint,
            connection_timeout=30,
            max_incoming_connections=100,
            max_future_callback_workers=10,
            authorize=True,
            public_key=signing.generate_public_key(identity_signing_key),
            priv_key=identity_signing_key,
            roles=roles,
            metrics_registry=metrics_registry)

        # -- Setup Transaction Execution Platform -- #
        context_manager = ContextManager(global_state_db, state_delta_store)

        batch_tracker = BatchTracker(block_store)

        executor = TransactionExecutor(
            service=component_service,
            context_manager=context_manager,
            settings_view_factory=SettingsViewFactory(state_view_factory),
            scheduler_type=scheduler_type,
            invalid_observers=[batch_tracker],
            metrics_registry=metrics_registry)

        component_service.set_check_connections(executor.check_connections)

        state_delta_processor = StateDeltaProcessor(component_service,
                                                    state_delta_store,
                                                    block_store)
        event_broadcaster = EventBroadcaster(component_service, block_store,
                                             receipt_store)

        # -- Setup P2P Networking -- #
        gossip = Gossip(network_service,
                        endpoint=endpoint,
                        peering_mode=peering,
                        initial_seed_endpoints=seeds_list,
                        initial_peer_endpoints=peer_list,
                        minimum_peer_connectivity=3,
                        maximum_peer_connectivity=10,
                        topology_check_frequency=1)

        completer = Completer(block_store, gossip)

        block_sender = BroadcastBlockSender(completer, gossip)
        batch_sender = BroadcastBatchSender(completer, gossip)
        chain_id_manager = ChainIdManager(data_dir)

        identity_view_factory = IdentityViewFactory(
            StateViewFactory(global_state_db))

        id_cache = IdentityCache(identity_view_factory,
                                 block_store.chain_head_state_root)

        # -- Setup Permissioning -- #
        permission_verifier = PermissionVerifier(
            permissions, block_store.chain_head_state_root, id_cache)

        identity_observer = IdentityObserver(to_update=id_cache.invalidate,
                                             forked=id_cache.forked)

        # -- Setup Journal -- #
        batch_injector_factory = DefaultBatchInjectorFactory(
            block_store=block_store,
            state_view_factory=state_view_factory,
            signing_key=identity_signing_key)

        block_publisher = BlockPublisher(
            transaction_executor=executor,
            block_cache=block_cache,
            state_view_factory=state_view_factory,
            block_sender=block_sender,
            batch_sender=batch_sender,
            squash_handler=context_manager.get_squash_handler(),
            chain_head=block_store.chain_head,
            identity_signing_key=identity_signing_key,
            data_dir=data_dir,
            config_dir=config_dir,
            permission_verifier=permission_verifier,
            check_publish_block_frequency=0.1,
            batch_observers=[batch_tracker],
            batch_injector_factory=batch_injector_factory,
            metrics_registry=metrics_registry)

        chain_controller = ChainController(
            block_sender=block_sender,
            block_cache=block_cache,
            state_view_factory=state_view_factory,
            transaction_executor=executor,
            chain_head_lock=block_publisher.chain_head_lock,
            on_chain_updated=block_publisher.on_chain_updated,
            squash_handler=context_manager.get_squash_handler(),
            chain_id_manager=chain_id_manager,
            identity_signing_key=identity_signing_key,
            data_dir=data_dir,
            config_dir=config_dir,
            permission_verifier=permission_verifier,
            chain_observers=[
                state_delta_processor, event_broadcaster, receipt_store,
                batch_tracker, identity_observer
            ],
            metrics_registry=metrics_registry)

        genesis_controller = GenesisController(
            context_manager=context_manager,
            transaction_executor=executor,
            completer=completer,
            block_store=block_store,
            state_view_factory=state_view_factory,
            identity_key=identity_signing_key,
            data_dir=data_dir,
            config_dir=config_dir,
            chain_id_manager=chain_id_manager,
            batch_sender=batch_sender)

        responder = Responder(completer)

        completer.set_on_batch_received(block_publisher.queue_batch)
        completer.set_on_block_received(chain_controller.queue_block)

        # -- Register Message Handler -- #
        network_handlers.add(network_dispatcher, network_service, gossip,
                             completer, responder, network_thread_pool,
                             sig_pool, permission_verifier)

        component_handlers.add(
            component_dispatcher, gossip, context_manager, executor, completer,
            block_store, batch_tracker, global_state_db,
            self.get_chain_head_state_root_hash, receipt_store,
            state_delta_processor, state_delta_store, event_broadcaster,
            permission_verifier, component_thread_pool, sig_pool)

        # -- Store Object References -- #
        self._component_dispatcher = component_dispatcher
        self._component_service = component_service
        self._component_thread_pool = component_thread_pool

        self._network_dispatcher = network_dispatcher
        self._network_service = network_service
        self._network_thread_pool = network_thread_pool

        self._sig_pool = sig_pool

        self._context_manager = context_manager
        self._executor = executor
        self._genesis_controller = genesis_controller
        self._gossip = gossip

        self._block_publisher = block_publisher
        self._chain_controller = chain_controller
Example #18
0
    def __init__(self, bind_network, bind_component, endpoint,
                 peering, seeds_list, peer_list, data_dir, config_dir,
                 identity_signing_key, scheduler_type,
                 network_public_key=None,
                 network_private_key=None):
        """Constructs a validator instance.

        Wires together the validator's storage layers (merkle state,
        state-delta store, block store), the transaction execution
        platform, the peer-to-peer networking stack, the journal, and
        all component/network message handlers.

        Args:
            bind_network (str): the network endpoint
            bind_component (str): the component endpoint
            endpoint (str): the zmq-style URI of this validator's
                publicly reachable endpoint
            peering (str): The type of peering approach. Either 'static'
                or 'dynamic'. In 'static' mode, no attempted topology
                buildout occurs -- the validator only attempts to initiate
                peering connections with endpoints specified in the
                peer_list. In 'dynamic' mode, the validator will first
                attempt to initiate peering connections with endpoints
                specified in the peer_list and then attempt to do a
                topology buildout starting with peer lists obtained from
                endpoints in the seeds_list. In either mode, the validator
                will accept incoming peer requests up to max_peers.
            seeds_list (list of str): a list of addresses to connect
                to in order to perform the initial topology buildout
            peer_list (list of str): a list of peer addresses
            data_dir (str): path to the data directory
            config_dir (str): path to the config directory
            identity_signing_key (str): key validator uses for signing
            scheduler_type (str): the transaction scheduler type; when
                None, the 'scheduler' entry of validator.toml in
                config_dir is used, defaulting to 'serial'
            network_public_key: server public key for the network
                endpoint; secure networking is enabled only when both
                network keys are provided
            network_private_key: server private key for the network
                endpoint
        """
        # The last two characters of the network bind address (normally
        # the tail of the port number) disambiguate the on-disk LMDB
        # files when multiple validators share a data directory.
        db_filename = os.path.join(data_dir,
                                   'merkle-{}.lmdb'.format(
                                       bind_network[-2:]))
        LOGGER.debug('database file is %s', db_filename)

        # Global merkle-radix state database.
        merkle_db = LMDBNoLockDatabase(db_filename, 'c')

        delta_db_filename = os.path.join(data_dir,
                                         'state-deltas-{}.lmdb'.format(
                                             bind_network[-2:]))
        LOGGER.debug('state delta store file is %s', delta_db_filename)
        state_delta_db = LMDBNoLockDatabase(delta_db_filename, 'c')

        # Persists state changes keyed by state root hash; served to
        # state-delta subscribers via the handlers registered below.
        state_delta_store = StateDeltaStore(state_delta_db)

        context_manager = ContextManager(merkle_db, state_delta_store)
        self._context_manager = context_manager

        state_view_factory = StateViewFactory(merkle_db)

        block_db_filename = os.path.join(data_dir, 'block-{}.lmdb'.format(
                                         bind_network[-2:]))
        LOGGER.debug('block store file is %s', block_db_filename)

        block_db = LMDBNoLockDatabase(block_db_filename, 'c')
        block_store = BlockStore(block_db)

        # Tracks batch statuses; observes block-store updates here and
        # invalid transactions via the executor below.
        batch_tracker = BatchTracker(block_store)
        block_store.add_update_observer(batch_tracker)

        # setup network
        self._dispatcher = Dispatcher()

        thread_pool = ThreadPoolExecutor(max_workers=10)
        sig_pool = ThreadPoolExecutor(max_workers=3)

        self._thread_pool = thread_pool
        self._sig_pool = sig_pool

        # Component (local) service: unsecured ZMQ endpoint used by
        # transaction processors and clients.
        self._service = Interconnect(bind_component,
                                     self._dispatcher,
                                     secured=False,
                                     heartbeat=False,
                                     max_incoming_connections=20,
                                     monitor=True)

        config_file = os.path.join(config_dir, "validator.toml")

        validator_config = {}
        if os.path.exists(config_file):
            with open(config_file) as fd:
                raw_config = fd.read()
            validator_config = toml.loads(raw_config)

        # An explicit scheduler_type argument takes precedence over the
        # config file; the final fallback is the serial scheduler.
        if scheduler_type is None:
            scheduler_type = validator_config.get("scheduler", "serial")

        executor = TransactionExecutor(
            service=self._service,
            context_manager=context_manager,
            settings_view_factory=SettingsViewFactory(
                                    StateViewFactory(merkle_db)),
            scheduler_type=scheduler_type,
            invalid_observers=[batch_tracker])

        self._executor = executor
        self._service.set_check_connections(executor.check_connections)

        # Publishes state deltas over the component service to
        # registered subscribers as blocks are committed.
        state_delta_processor = StateDeltaProcessor(self._service,
                                                    state_delta_store,
                                                    block_store)

        # Quasi-unique ZMQ identity derived from the current time
        # (truncated sha512 of the float's hex form).
        zmq_identity = hashlib.sha512(
            time.time().hex().encode()).hexdigest()[:23]

        network_thread_pool = ThreadPoolExecutor(max_workers=10)
        self._network_thread_pool = network_thread_pool

        self._network_dispatcher = Dispatcher()

        # Secure (encrypted) networking is enabled only when both the
        # public and private server keys are supplied.
        secure = False
        if network_public_key is not None and network_private_key is not None:
            secure = True

        self._network = Interconnect(
            bind_network,
            dispatcher=self._network_dispatcher,
            zmq_identity=zmq_identity,
            secured=secure,
            server_public_key=network_public_key,
            server_private_key=network_private_key,
            heartbeat=True,
            public_endpoint=endpoint,
            connection_timeout=30,
            max_incoming_connections=100)

        self._gossip = Gossip(self._network,
                              endpoint=endpoint,
                              peering_mode=peering,
                              initial_seed_endpoints=seeds_list,
                              initial_peer_endpoints=peer_list,
                              minimum_peer_connectivity=3,
                              maximum_peer_connectivity=10,
                              topology_check_frequency=1)

        # Holds blocks/batches until their dependencies arrive, then
        # forwards them to the journal (callbacks wired further below).
        completer = Completer(block_store, self._gossip)

        block_sender = BroadcastBlockSender(completer, self._gossip)
        batch_sender = BroadcastBatchSender(completer, self._gossip)
        chain_id_manager = ChainIdManager(data_dir)
        # Create and configure journal
        self._journal = Journal(
            block_store=block_store,
            state_view_factory=StateViewFactory(merkle_db),
            block_sender=block_sender,
            batch_sender=batch_sender,
            transaction_executor=executor,
            squash_handler=context_manager.get_squash_handler(),
            identity_signing_key=identity_signing_key,
            chain_id_manager=chain_id_manager,
            state_delta_processor=state_delta_processor,
            data_dir=data_dir,
            config_dir=config_dir,
            check_publish_block_frequency=0.1,
            block_cache_purge_frequency=30,
            block_cache_keep_time=300,
            batch_observers=[batch_tracker]
        )

        self._genesis_controller = GenesisController(
            context_manager=context_manager,
            transaction_executor=executor,
            completer=completer,
            block_store=block_store,
            state_view_factory=state_view_factory,
            identity_key=identity_signing_key,
            data_dir=data_dir,
            config_dir=config_dir,
            chain_id_manager=chain_id_manager,
            batch_sender=batch_sender
        )

        # Answers peers' requests for blocks/batches this node holds.
        responder = Responder(completer)

        completer.set_on_batch_received(self._journal.on_batch_received)
        completer.set_on_block_received(self._journal.on_block_received)

        # -- Transaction-processor handlers (component bus) -- #
        self._dispatcher.add_handler(
            validator_pb2.Message.TP_STATE_GET_REQUEST,
            tp_state_handlers.TpStateGetHandler(context_manager),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.TP_STATE_SET_REQUEST,
            tp_state_handlers.TpStateSetHandler(context_manager),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.TP_REGISTER_REQUEST,
            processor_handlers.ProcessorRegisterHandler(executor.processors),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.TP_UNREGISTER_REQUEST,
            processor_handlers.ProcessorUnRegisterHandler(executor.processors),
            thread_pool)

        # Set up base network handlers
        self._network_dispatcher.add_handler(
            validator_pb2.Message.NETWORK_PING,
            PingHandler(),
            network_thread_pool)

        self._network_dispatcher.add_handler(
            validator_pb2.Message.NETWORK_CONNECT,
            ConnectHandler(network=self._network),
            network_thread_pool)

        self._network_dispatcher.add_handler(
            validator_pb2.Message.NETWORK_DISCONNECT,
            DisconnectHandler(network=self._network),
            network_thread_pool)

        # Set up gossip handlers
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_GET_PEERS_REQUEST,
            GetPeersRequestHandler(gossip=self._gossip),
            network_thread_pool)

        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_GET_PEERS_RESPONSE,
            GetPeersResponseHandler(gossip=self._gossip),
            network_thread_pool)

        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_REGISTER,
            PeerRegisterHandler(gossip=self._gossip),
            network_thread_pool)

        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_UNREGISTER,
            PeerUnregisterHandler(gossip=self._gossip),
            network_thread_pool)

        # NOTE: multiple handlers registered under the same message type
        # form a pipeline; registration order is the processing order.

        # GOSSIP_MESSAGE 1) Sends acknowledgement to the sender
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_MESSAGE,
            GossipMessageHandler(),
            network_thread_pool)

        # GOSSIP_MESSAGE 2) Verifies signature
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_MESSAGE,
            signature_verifier.GossipMessageSignatureVerifier(),
            sig_pool)

        # GOSSIP_MESSAGE 3) Verifies batch structure
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_MESSAGE,
            structure_verifier.GossipHandlerStructureVerifier(),
            network_thread_pool)

        # GOSSIP_MESSAGE 4) Determines if we should broadcast the
        # message to our peers. It is important that this occur prior
        # to the sending of the message to the completer, as this step
        # relies on whether the gossip message has previously been
        # seen by the validator to determine whether or not forwarding
        # should occur
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_MESSAGE,
            GossipBroadcastHandler(
                gossip=self._gossip,
                completer=completer),
            network_thread_pool)

        # GOSSIP_MESSAGE 5) Send message to completer
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_MESSAGE,
            CompleterGossipHandler(
                completer),
            network_thread_pool)

        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BLOCK_REQUEST,
            BlockResponderHandler(responder, self._gossip),
            network_thread_pool)

        # GOSSIP_BLOCK_RESPONSE 1) Sends ack to the sender
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BLOCK_RESPONSE,
            GossipBlockResponseHandler(),
            network_thread_pool)

        # GOSSIP_BLOCK_RESPONSE 2) Verifies signature
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BLOCK_RESPONSE,
            signature_verifier.GossipBlockResponseSignatureVerifier(),
            sig_pool)

        # GOSSIP_BLOCK_RESPONSE 3) Check batch structure
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BLOCK_RESPONSE,
            structure_verifier.GossipBlockResponseStructureVerifier(),
            network_thread_pool)

        # GOSSIP_BLOCK_RESPONSE 4) Send message to completer
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BLOCK_RESPONSE,
            CompleterGossipBlockResponseHandler(
                completer),
            network_thread_pool)

        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BLOCK_RESPONSE,
            ResponderBlockResponseHandler(responder, self._gossip),
            network_thread_pool)

        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BATCH_BY_BATCH_ID_REQUEST,
            BatchByBatchIdResponderHandler(responder, self._gossip),
            network_thread_pool)

        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BATCH_BY_TRANSACTION_ID_REQUEST,
            BatchByTransactionIdResponderHandler(responder, self._gossip),
            network_thread_pool)

        # GOSSIP_BATCH_RESPONSE 1) Sends ack to the sender
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BATCH_RESPONSE,
            GossipBatchResponseHandler(),
            network_thread_pool)

        # GOSSIP_BATCH_RESPONSE 2) Verifies signature
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BATCH_RESPONSE,
            signature_verifier.GossipBatchResponseSignatureVerifier(),
            sig_pool)

        # GOSSIP_BATCH_RESPONSE 3) Check batch structure
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BATCH_RESPONSE,
            structure_verifier.GossipBatchResponseStructureVerifier(),
            network_thread_pool)

        # GOSSIP_BATCH_RESPONSE 4) Send message to completer
        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BATCH_RESPONSE,
            CompleterGossipBatchResponseHandler(
                completer),
            network_thread_pool)

        self._network_dispatcher.add_handler(
            validator_pb2.Message.GOSSIP_BATCH_RESPONSE,
            ResponderBatchResponseHandler(responder, self._gossip),
            network_thread_pool)

        # -- Client request handlers (component bus) -- #
        # Batch submission is also a pipeline: permission check,
        # signature check, structure check, broadcast, then finish.
        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_BATCH_SUBMIT_REQUEST,
            BatchListPermissionVerifier(
                settings_view_factory=SettingsViewFactory(
                    StateViewFactory(merkle_db)),
                current_root_func=self._journal.get_current_root,
            ),
            sig_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_BATCH_SUBMIT_REQUEST,
            signature_verifier.BatchListSignatureVerifier(),
            sig_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_BATCH_SUBMIT_REQUEST,
            structure_verifier.BatchListStructureVerifier(),
            network_thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_BATCH_SUBMIT_REQUEST,
            CompleterBatchListBroadcastHandler(
                completer, self._gossip),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_BATCH_SUBMIT_REQUEST,
            client_handlers.BatchSubmitFinisher(batch_tracker),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_BATCH_STATUS_REQUEST,
            client_handlers.BatchStatusRequest(batch_tracker),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_STATE_LIST_REQUEST,
            client_handlers.StateListRequest(
                merkle_db,
                self._journal.get_block_store()),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_STATE_GET_REQUEST,
            client_handlers.StateGetRequest(
                merkle_db,
                self._journal.get_block_store()),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_BLOCK_LIST_REQUEST,
            client_handlers.BlockListRequest(self._journal.get_block_store()),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_BLOCK_GET_REQUEST,
            client_handlers.BlockGetRequest(self._journal.get_block_store()),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_BATCH_LIST_REQUEST,
            client_handlers.BatchListRequest(self._journal.get_block_store()),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_BATCH_GET_REQUEST,
            client_handlers.BatchGetRequest(self._journal.get_block_store()),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_TRANSACTION_LIST_REQUEST,
            client_handlers.TransactionListRequest(
                self._journal.get_block_store()),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_TRANSACTION_GET_REQUEST,
            client_handlers.TransactionGetRequest(
                self._journal.get_block_store()),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.CLIENT_STATE_CURRENT_REQUEST,
            client_handlers.StateCurrentRequest(
                self._journal.get_current_root), thread_pool)

        # State Delta Subscription Handlers
        self._dispatcher.add_handler(
            validator_pb2.Message.STATE_DELTA_SUBSCRIBE_REQUEST,
            StateDeltaSubscriberValidationHandler(state_delta_processor),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.STATE_DELTA_SUBSCRIBE_REQUEST,
            StateDeltaAddSubscriberHandler(state_delta_processor),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.STATE_DELTA_UNSUBSCRIBE_REQUEST,
            StateDeltaUnsubscriberHandler(state_delta_processor),
            thread_pool)

        self._dispatcher.add_handler(
            validator_pb2.Message.STATE_DELTA_GET_EVENTS_REQUEST,
            StateDeltaGetEventsHandler(block_store, state_delta_store),
            thread_pool)