Example 1
class Messenger(object):
    def __init__(self, validator_url):
        self._connection = Connection(validator_url)
        self._context = create_context('secp256k1')
        self._crypto_factory = CryptoFactory(self._context)
        self._batch_signer = self._crypto_factory.new_signer(
            self._context.new_random_private_key())

    def open_validator_connection(self):
        self._connection.open()

    def close_validator_connection(self):
        self._connection.close()

    def get_new_key_pair(self):
        private_key = self._context.new_random_private_key()
        public_key = self._context.get_public_key(private_key)
        return public_key.as_hex(), private_key.as_hex()

    async def send_create_agent_transaction(self,
                                            private_key,
                                            name,
                                            timestamp):
        transaction_signer = self._crypto_factory.new_signer(
            secp256k1.Secp256k1PrivateKey.from_hex(private_key))

        batch = make_create_agent_transaction(
            transaction_signer=transaction_signer,
            batch_signer=self._batch_signer,
            name=name,
            timestamp=timestamp)
        await self._send_and_wait_for_commit(batch)

    async def _send_and_wait_for_commit(self, batch):
        # Send transaction to validator
        submit_request = client_batch_submit_pb2.ClientBatchSubmitRequest(
            batches=[batch])
        await self._connection.send(
            validator_pb2.Message.CLIENT_BATCH_SUBMIT_REQUEST,
            submit_request.SerializeToString())

        # Send status request to validator
        batch_id = batch.header_signature
        status_request = client_batch_submit_pb2.ClientBatchStatusRequest(
            batch_ids=[batch_id], wait=True)
        validator_response = await self._connection.send(
            validator_pb2.Message.CLIENT_BATCH_STATUS_REQUEST,
            status_request.SerializeToString())

        # Parse response
        status_response = client_batch_submit_pb2.ClientBatchStatusResponse()
        status_response.ParseFromString(validator_response.content)
        status = status_response.batch_statuses[0].status
        if status == client_batch_submit_pb2.ClientBatchStatus.INVALID:
            error = status_response.batch_statuses[0].invalid_transactions[0]
            raise ApiBadRequest(error.message)
        elif status == client_batch_submit_pb2.ClientBatchStatus.PENDING:
            raise ApiInternalError('Transaction submitted but timed out')
        elif status == client_batch_submit_pb2.ClientBatchStatus.UNKNOWN:
            raise ApiInternalError('Something went wrong. Try again later')
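A minimal usage sketch for the Messenger above; the validator URL and agent fields are illustrative, and ApiBadRequest / ApiInternalError propagate out of send_create_agent_transaction as shown in _send_and_wait_for_commit:

import asyncio
import time

async def main():
    # 'tcp://localhost:4004' is the conventional validator endpoint;
    # adjust for your deployment.
    messenger = Messenger('tcp://localhost:4004')
    messenger.open_validator_connection()
    try:
        public_key, private_key = messenger.get_new_key_pair()
        await messenger.send_create_agent_transaction(
            private_key=private_key,
            name='alice',
            timestamp=int(time.time()))
    finally:
        messenger.close_validator_connection()

asyncio.get_event_loop().run_until_complete(main())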
Example 2
    def __init__(self, delegate, args):
        super(IntKeyWorkload, self).__init__(delegate, args)
        self._auth_info = args.auth_info
        self._urls = []
        self._pending_batches = {}
        self._lock = threading.Lock()
        self._delegate = delegate
        self._deps = {}
        context = create_context('secp256k1')
        crypto_factory = CryptoFactory(context=context)
        if args.key_file is not None:
            try:
                with open(args.key_file, 'r') as infile:
                    signing_key = infile.read().strip()
                private_key = Secp256k1PrivateKey.from_hex(signing_key)

                self._signer = crypto_factory.new_signer(
                    private_key=private_key)
            except ParseError as pe:
                raise IntKeyCliException(str(pe))
            except IOError as ioe:
                raise IntKeyCliException(str(ioe))
        else:
            self._signer = crypto_factory.new_signer(
                context.new_random_private_key())
Example 3
    def __init__(self, with_genesis=True):
        self.block_sender = MockBlockSender()
        self.batch_sender = MockBatchSender()
        self.block_store = BlockStore(
            DictDatabase(indexes=BlockStore.create_index_configuration()))
        self.block_cache = BlockCache(self.block_store)
        self.state_db = {}

        self.block_manager = BlockManager()
        self.block_manager.add_store("commit_store", self.block_store)

        # add the mock reference to the consensus
        consensus_setting_addr = SettingsView.setting_address(
            'sawtooth.consensus.algorithm')
        self.state_db[consensus_setting_addr] = _setting_entry(
            'sawtooth.consensus.algorithm', 'test_journal.mock_consensus')

        self.state_view_factory = MockStateViewFactory(self.state_db)
        context = create_context('secp256k1')
        private_key = context.new_random_private_key()
        crypto_factory = CryptoFactory(context)
        self.signer = crypto_factory.new_signer(private_key)

        identity_private_key = context.new_random_private_key()
        self.identity_signer = crypto_factory.new_signer(identity_private_key)
        chain_head = None
        if with_genesis:
            self.genesis_block = self.generate_genesis_block()
            chain_head = self.genesis_block
            self.block_manager.put([chain_head.block])
            self.block_manager.persist(chain_head.block.header_signature,
                                       "commit_store")

        self.block_publisher = BlockPublisher(
            block_manager=self.block_manager,
            transaction_executor=MockTransactionExecutor(),
            transaction_committed=self.block_store.has_transaction,
            batch_committed=self.block_store.has_batch,
            state_view_factory=self.state_view_factory,
            settings_cache=SettingsCache(
                SettingsViewFactory(self.state_view_factory), ),
            block_sender=self.block_sender,
            batch_sender=self.block_sender,
            chain_head=chain_head.block,
            identity_signer=self.identity_signer,
            data_dir=None,
            config_dir=None,
            permission_verifier=MockPermissionVerifier(),
            batch_observers=[])
Example 5
    def __init__(self, with_genesis=True):
        self.block_sender = MockBlockSender()
        self.batch_sender = MockBatchSender()
        self.dir = tempfile.mkdtemp()
        self.block_db = NativeLmdbDatabase(
            os.path.join(self.dir, 'block.lmdb'),
            BlockStore.create_index_configuration())
        self.block_store = BlockStore(self.block_db)
        self.block_cache = BlockCache(self.block_store)
        self.state_db = NativeLmdbDatabase(
            os.path.join(self.dir, "merkle.lmdb"),
            MerkleDatabase.create_index_configuration())

        self.state_view_factory = NativeStateViewFactory(self.state_db)

        self.block_manager = BlockManager()
        self.block_manager.add_commit_store(self.block_store)

        context = create_context('secp256k1')
        private_key = context.new_random_private_key()
        crypto_factory = CryptoFactory(context)
        self.signer = crypto_factory.new_signer(private_key)

        identity_private_key = context.new_random_private_key()
        self.identity_signer = crypto_factory.new_signer(identity_private_key)
        chain_head = None
        if with_genesis:
            self.genesis_block = self.generate_genesis_block()
            chain_head = self.genesis_block
            self.block_manager.put([chain_head.block])
            self.block_manager.persist(
                chain_head.block.header_signature,
                "commit_store")

        self.block_publisher = BlockPublisher(
            block_manager=self.block_manager,
            transaction_executor=MockTransactionExecutor(),
            transaction_committed=self.block_store.has_transaction,
            batch_committed=self.block_store.has_batch,
            state_view_factory=self.state_view_factory,
            block_sender=self.block_sender,
            batch_sender=self.block_sender,
            chain_head=chain_head.block,
            identity_signer=self.identity_signer,
            data_dir=None,
            config_dir=None,
            permission_verifier=MockPermissionVerifier(),
            batch_observers=[])
Example 6
def do_authorization_challenge_submit():
    """
    Test that the AuthorizationChallengeSubmitHandler returns an
    AuthorizationChallengeResult.
    """
    context = create_context('secp256k1')
    private_key = context.new_random_private_key()
    crypto_factory = CryptoFactory(context)
    signer = crypto_factory.new_signer(private_key)

    payload = os.urandom(10)

    signature = signer.sign(payload)

    auth_challenge_submit = AuthorizationChallengeSubmit(
        public_key=signer.get_public_key().as_hex(),
        signature=signature,
        roles=[RoleType.Value("NETWORK")])

    roles = {"network": AuthorizationType.TRUST}

    network = MockNetwork(roles,
                          connection_status={
                              "connection_id":
                              ConnectionStatus.AUTH_CHALLENGE_REQUEST
                          })
    permission_verifier = MockPermissionVerifier()
    gossip = MockGossip()
    handler = AuthorizationChallengeSubmitHandler(
        network, permission_verifier, gossip, {"connection_id": payload})
    handler_status = handler.handle("connection_id",
                                    auth_challenge_submit.SerializeToString())

    return handler_status
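As a sketch of how this helper might be asserted against; HandlerStatus and validator_pb2 are the imports used elsewhere in these examples, and the success message type is an assumption based on the docstring:

handler_status = do_authorization_challenge_submit()
# Assumed happy path: the handler answers with an
# AuthorizationChallengeResult rather than closing the connection.
assert handler_status.status == HandlerStatus.RETURN
assert handler_status.message_type == \
    validator_pb2.Message.AUTHORIZATION_CHALLENGE_RESULT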
Example 7
def _read_signer(key_filename):
    """Reads the given file as a hex, or (as a fallback) a WIF formatted key.

    Args:
        key_filename: The filename where the key is stored. If None,
            defaults to the default key for the current user.

    Returns:
        Signer: the signer

    Raises:
        CliException: If unable to read the file.
    """
    filename = key_filename
    if filename is None:
        filename = os.path.join(os.path.expanduser('~'), '.sawtooth', 'keys',
                                getpass.getuser() + '.priv')

    try:
        with open(filename, 'r') as key_file:
            signing_key = key_file.read().strip()
    except IOError as e:
        raise CliException('Unable to read key file: {}'.format(str(e)))

    try:
        private_key = Secp256k1PrivateKey.from_hex(signing_key)
    except ParseError as e:
        try:
            private_key = Secp256k1PrivateKey.from_wif(signing_key)
        except ParseError:
            raise CliException('Unable to read key in file: {}'.format(str(e)))

    context = create_context('secp256k1')
    crypto_factory = CryptoFactory(context)
    return crypto_factory.new_signer(private_key)
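A hedged sketch of producing a key file that _read_signer can consume; it writes a fresh hex key to the same default path the function falls back to:

import os
import getpass
from sawtooth_signing import create_context

key_dir = os.path.join(os.path.expanduser('~'), '.sawtooth', 'keys')
os.makedirs(key_dir, exist_ok=True)
key_path = os.path.join(key_dir, getpass.getuser() + '.priv')

# Hex is the primary format _read_signer expects; WIF is its fallback.
private_key = create_context('secp256k1').new_random_private_key()
with open(key_path, 'w') as key_file:
    key_file.write(private_key.as_hex())

signer = _read_signer(None)  # None selects the default path written above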
Example 8
    def test_authorization_challenge_submit_bad_last_message(self):
        """
        Test that the AuthorizationChallengeSubmitHandler returns an
        AuthorizationViolation and closes the connection if the last message
        was not AuthorizationChallengeRequest.
        """
        context = create_context('secp256k1')
        private_key = context.new_random_private_key()
        crypto_factory = CryptoFactory(context)
        signer = crypto_factory.new_signer(private_key)

        payload = os.urandom(10)

        signature = signer.sign(payload)

        auth_challenge_submit = AuthorizationChallengeSubmit(
            public_key=signer.get_public_key().as_hex(),
            signature=signature,
            roles=[RoleType.Value("NETWORK")])

        roles = {"network": AuthorizationType.TRUST}

        network = MockNetwork(roles,
                              connection_status={"connection_id": "other"})
        permission_verifier = MockPermissionVerifier()
        gossip = MockGossip()
        handler = AuthorizationChallengeSubmitHandler(
            network, permission_verifier, gossip, {"connection_id": payload})
        handler_status = handler.handle(
            "connection_id", auth_challenge_submit.SerializeToString())
        self.assertEqual(handler_status.status, HandlerStatus.RETURN_AND_CLOSE)
        self.assertEqual(handler_status.message_type,
                         validator_pb2.Message.AUTHORIZATION_VIOLATION)
Example 9
def _load_identity_signer(key_dir, key_name):
    """Loads a private key from the key directory, based on a validator's
    identity.

    Args:
        key_dir (str): The path to the key directory.
        key_name (str): The name of the key to load.

    Returns:
        Signer: the cryptographic signer for the key
    """
    key_path = os.path.join(key_dir, '{}.priv'.format(key_name))

    if not os.path.exists(key_path):
        raise Exception("No such signing key file: {}".format(key_path))
    if not os.access(key_path, os.R_OK):
        raise Exception("Key file is not readable: {}".format(key_path))

    LOGGER.info('Loading signing key: %s', key_path)
    try:
        with open(key_path, 'r') as key_file:
            private_key_str = key_file.read().strip()
    except IOError as e:
        raise Exception("Could not load key file: {}".format(str(e)))

    try:
        private_key = Secp256k1PrivateKey.from_hex(private_key_str)
    except signing.ParseError as e:
        raise Exception("Invalid key in file {}: {}".format(key_path, str(e)))

    context = signing.create_context('secp256k1')
    crypto_factory = CryptoFactory(context)
    return crypto_factory.new_signer(private_key)
Example 10
def _read_signer(key_filename):
    """Reads the given file as a hex key.

    Args:
        key_filename: The filename where the key is stored. If None,
            defaults to the default key for the current user.

    Returns:
        Signer: the signer

    Raises:
        CliException: If unable to read the file.
    """
    filename = key_filename
    if filename is None:
        filename = os.path.join(config.get_key_dir(), 'validator.priv')

    try:
        with open(filename, 'r') as key_file:
            signing_key = key_file.read().strip()
    except IOError as e:
        raise CliException('Unable to read key file: {}'.format(str(e)))

    try:
        private_key = Secp256k1PrivateKey.from_hex(signing_key)
    except ParseError as e:
        raise CliException('Unable to read key in file: {}'.format(str(e)))

    context = create_context('secp256k1')
    crypto_factory = CryptoFactory(context)
    return crypto_factory.new_signer(private_key)
Example 11
def _read_signer(key_filename):
    """Reads the given file as a hex key.

    Args:
        key_filename: The filename where the key is stored. If None,
            defaults to the default key for the current user.

    Returns:
        Signer: the signer

    Raises:
        CliException: If unable to read the file.
    """
    filename = key_filename
    if filename is None:
        filename = os.path.join(os.path.expanduser('~'),
                                '.sawtooth',
                                'keys',
                                getpass.getuser() + '.priv')

    try:
        with open(filename, 'r') as key_file:
            signing_key = key_file.read().strip()
    except IOError as e:
        raise CliException('Unable to read key file: {}'.format(str(e)))

    try:
        private_key = Secp256k1PrivateKey.from_hex(signing_key)
    except ParseError as e:
        raise CliException('Unable to read key in file: {}'.format(str(e)))

    context = create_context('secp256k1')
    crypto_factory = CryptoFactory(context)
    return crypto_factory.new_signer(private_key)
Example 12
    def setUp(self):
        self.dir = tempfile.mkdtemp()
        self.block_db = NativeLmdbDatabase(
            os.path.join(self.dir, 'block.lmdb'),
            BlockStore.create_index_configuration())
        self.block_store = BlockStore(self.block_db)
        self.block_manager = BlockManager()
        self.block_manager.add_commit_store(self.block_store)
        self.gossip = MockGossip()
        self.completer = Completer(
            block_manager=self.block_manager,
            transaction_committed=self.block_store.has_transaction,
            get_committed_batch_by_id=self.block_store.get_batch,
            get_committed_batch_by_txn_id=(
                self.block_store.get_batch_by_transaction
            ),
            get_chain_head=lambda: self.block_store.chain_head,
            gossip=self.gossip)
        self.completer.set_on_block_received(self._on_block_received)
        self.completer.set_on_batch_received(self._on_batch_received)
        self._has_block_value = True

        context = create_context('secp256k1')
        private_key = context.new_random_private_key()
        crypto_factory = CryptoFactory(context)
        self.signer = crypto_factory.new_signer(private_key)

        self.blocks = []
        self.batches = []
Example 13
def do_populate(batches, keys):
    context = create_context('secp256k1')
    private_key = context.new_random_private_key()
    crypto_factory = CryptoFactory(context)
    signer = crypto_factory.new_signer(private_key)

    total_txn_count = 0
    txns = []
    for name in keys:
        txn = create_intkey_transaction(
            verb='set',
            name=name,
            value=random.randint(9000, 100000),
            deps=[],
            signer=signer)
        total_txn_count += 1
        txns.append(txn)
        # Establish the signature of the txn associated with the word
        # so we can create good dependencies later
        keys[name] = txn.header_signature

    batch = create_batch(
        transactions=txns,
        signer=signer)

    batches.append(batch)
Example 14
    def setUp(self):
        self.block_store = BlockStore(
            DictDatabase(indexes=BlockStore.create_index_configuration()))
        self.block_manager = BlockManager()
        self.block_manager.add_store("commit_store", self.block_store)
        self.gossip = MockGossip()
        self.completer = Completer(
            block_manager=self.block_manager,
            transaction_committed=self.block_store.has_transaction,
            get_committed_batch_by_id=self.block_store.get_batch,
            get_committed_batch_by_txn_id=(
                self.block_store.get_batch_by_transaction),
            get_chain_head=lambda: self.block_store.chain_head,
            gossip=self.gossip)
        self.completer.set_on_block_received(self._on_block_received)
        self.completer.set_on_batch_received(self._on_batch_received)
        self._has_block_value = True

        context = create_context('secp256k1')
        private_key = context.new_random_private_key()
        crypto_factory = CryptoFactory(context)
        self.signer = crypto_factory.new_signer(private_key)

        self.blocks = []
        self.batches = []
Example 15
def create_chain(num=10):
    context = create_context('secp256k1')
    private_key = context.new_random_private_key()
    crypto_factory = CryptoFactory(context)
    signer = crypto_factory.new_signer(private_key)

    counter = 1
    previous_block_id = "0000000000000000"
    blocks = []
    while counter <= num:
        current_block_id = uuid4().hex
        txns = [
            t[0] for t in [
                create_transaction(payload=uuid4().hex.encode(), signer=signer)
                for _ in range(20)
            ]
        ]

        txn_ids = [t.header_signature for t in txns]
        batch = create_batch(transactions=txns, signer=signer)

        blk_w = create_block(counter,
                             previous_block_id,
                             current_block_id,
                             batches=[batch])
        blocks.append((current_block_id, blk_w, txn_ids))

        counter += 1
        previous_block_id = current_block_id

    return blocks
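Each element of the returned list is a (block_id, block_wrapper, txn_ids) tuple, so consumption looks like:

blocks = create_chain(num=3)
for block_id, blk_w, txn_ids in blocks:
    # create_chain puts one batch of 20 transactions in every block.
    assert len(txn_ids) == 20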
Example 16
    def setUp(self):
        self.dir = tempfile.mkdtemp()
        self.block_db = NativeLmdbDatabase(
            os.path.join(self.dir, 'block.lmdb'),
            BlockStore.create_index_configuration())
        self.block_store = BlockStore(self.block_db)
        self.block_manager = BlockManager()
        self.block_manager.add_commit_store(self.block_store)
        self.gossip = MockGossip()
        self.completer = Completer(
            block_manager=self.block_manager,
            transaction_committed=self.block_store.has_transaction,
            get_committed_batch_by_id=self.block_store.get_batch,
            get_committed_batch_by_txn_id=(
                self.block_store.get_batch_by_transaction),
            gossip=self.gossip)
        self.completer.set_get_chain_head(lambda: self.block_store.chain_head)
        self.completer.set_on_block_received(self._on_block_received)
        self.completer.set_on_batch_received(self._on_batch_received)
        self._has_block_value = True

        context = create_context('secp256k1')
        private_key = context.new_random_private_key()
        crypto_factory = CryptoFactory(context)
        self.signer = crypto_factory.new_signer(private_key)

        self.blocks = []
        self.batches = []
Example 18
def get_signature(message, private_key, privkey_format="wif"):
    context = create_context("secp256k1")
    factory = CryptoFactory(context)

    # Parse the key according to the requested format (wif or hex).
    if privkey_format == "wif":
        privkey = Secp256k1PrivateKey.from_wif(private_key)
    else:
        privkey = Secp256k1PrivateKey.from_hex(private_key)
    signer = factory.new_signer(privkey)
    return signer.sign(message.encode())
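A quick round-trip check for get_signature; treat the exact verify() signature as an assumption, though (signature, message, public_key) is how the secp256k1 context is conventionally used:

context = create_context("secp256k1")
private_key = context.new_random_private_key()

signature = get_signature("hello sawtooth",
                          private_key.as_hex(),
                          privkey_format="hex")

public_key = context.get_public_key(private_key)
# Assumed API: verify() returns True for a valid signature.
assert context.verify(signature, "hello sawtooth".encode(), public_key)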
Example 19
def test_single_key_signing(benchmark):
    context = create_context("secp256k1")
    factory = CryptoFactory(context)
    priv_key = Secp256k1PrivateKey.from_hex(KEY1_PRIV_HEX)
    signer = factory.new_signer(priv_key)
    signature = benchmark(signer.sign, MSG1.encode())

    assert signature == MSG1_KEY1_SIG
Example 20
def __fabricate_signer():
    """Fabricate private, public and signer keys"""
    context = create_context('secp256k1')
    private_key = context.new_random_private_key()
    public_key = context.get_public_key(private_key)
    crypto_factory = CryptoFactory(context)
    signer_key = crypto_factory.new_signer(private_key)
    return (public_key.as_hex(), private_key.as_hex(), signer_key)
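The helper returns hex strings plus a ready signer, so usage is a simple tuple unpack:

public_key_hex, private_key_hex, signer_key = __fabricate_signer()
signature = signer_key.sign(b'some payload')
# The signer's public key matches the hex value returned alongside it.
assert signer_key.get_public_key().as_hex() == public_key_hex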
Example 21
    def __init__(self, with_genesis=True):
        self.block_sender = MockBlockSender()
        self.batch_sender = MockBatchSender()
        self.block_store = BlockStore(DictDatabase(
            indexes=BlockStore.create_index_configuration()))
        self.block_cache = BlockCache(self.block_store)
        self.state_db = {}

        # add the mock reference to the consensus
        consensus_setting_addr = SettingsView.setting_address(
            'sawtooth.consensus.algorithm')
        self.state_db[consensus_setting_addr] = _setting_entry(
            'sawtooth.consensus.algorithm', 'test_journal.mock_consensus')

        self.state_view_factory = MockStateViewFactory(self.state_db)
        context = create_context('secp256k1')
        private_key = context.new_random_private_key()
        crypto_factory = CryptoFactory(context)
        self.signer = crypto_factory.new_signer(private_key)

        identity_private_key = context.new_random_private_key()
        self.identity_signer = crypto_factory.new_signer(identity_private_key)
        chain_head = None
        if with_genesis:
            self.genesis_block = self.generate_genesis_block()
            self.set_chain_head(self.genesis_block)
            chain_head = self.genesis_block

        self.block_publisher = BlockPublisher(
            transaction_executor=MockTransactionExecutor(),
            block_cache=self.block_cache,
            state_view_factory=self.state_view_factory,
            settings_cache=SettingsCache(
                SettingsViewFactory(self.state_view_factory),
            ),
            block_sender=self.block_sender,
            batch_sender=self.block_sender,
            squash_handler=None,
            chain_head=chain_head,
            identity_signer=self.identity_signer,
            data_dir=None,
            config_dir=None,
            permission_verifier=MockPermissionVerifier(),
            check_publish_block_frequency=0.1,
            batch_observers=[])
Example 23
 def test_key_signing(self):
     self.assertTrue(callable(generate_random_string))
     signer_keypair = Key()
     message = generate_random_string(50)
     factory = CryptoFactory(sawtooth_signing.create_context("secp256k1"))
     signer = factory.new_signer(
         Secp256k1PrivateKey.from_hex(signer_keypair.private_key))
     signature = signer.sign(bytes(message, "utf8"))
     self.assertTrue(SIGNATURE_PATTERN.match(signature))
     return signature, message, signer_keypair.public_key
Example 24
 def __init__(self):
     self._context = create_context('secp256k1')
     self._private_key = Secp256k1PrivateKey.new_random()
     self._public_key = self._context.get_public_key(self._private_key)
     crypto_factory = CryptoFactory(self._context)
     self._signer = crypto_factory.new_signer(self._private_key)
     # Log only the public key; private keys should never be logged.
     LOGGER.debug('__init__: public_key=%s', self._public_key.as_hex())
     LOGGER.info('BgtTransactionHandler init DONE')
Example 25
def wrap_payload_in_txn_batch(txn_key, payload, header, batch_key):
    """Takes the serialized RBACPayload and creates a batch_list, batch
    signature tuple.

    Args:
        txn_key (Key): The txn signer's public/private key pair.
        payload (bytes): The serialized RBACPayload.
        header (bytes): The serialized TransactionHeader.
        batch_key (Key): The batch signer's public/private key pair.

    Returns:
        tuple
            The zeroth element is a BatchList, and the first element is
            the batch header_signature.
    """

    factory = CryptoFactory(sawtooth_signing.create_context("secp256k1"))

    txn_signer = factory.new_signer(
        Secp256k1PrivateKey.from_hex(txn_key.private_key))
    transaction = transaction_pb2.Transaction(
        payload=payload,
        header=header,
        header_signature=txn_signer.sign(header))

    batch_header = batch_pb2.BatchHeader(
        signer_public_key=batch_key.public_key,
        transaction_ids=[transaction.header_signature],
    ).SerializeToString()

    batch_signer = factory.new_signer(
        Secp256k1PrivateKey.from_hex(batch_key.private_key))
    batch = batch_pb2.Batch(
        header=batch_header,
        header_signature=batch_signer.sign(batch_header),
        transactions=[transaction],
    )

    batch_list = batch_pb2.BatchList(batches=[batch])
    return batch_list, batch.header_signature
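A sketch of driving wrap_payload_in_txn_batch; Key() is the keypair helper seen in Example 23 above, and the payload/header bytes are hypothetical stand-ins for real serialized protobufs:

txn_key = Key()
batch_key = Key()

payload = b'serialized-rbac-payload'        # hypothetical payload bytes
header = b'serialized-transaction-header'   # hypothetical header bytes

batch_list, batch_id = wrap_payload_in_txn_batch(
    txn_key, payload, header, batch_key)
# The returned signature identifies the single batch in the BatchList.
assert batch_list.batches[0].header_signature == batch_id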
Example 26
 def setUp(self):
     context = create_context('secp256k1')
     crypto_factory = CryptoFactory(context)
     private_key = context.new_random_private_key()
     self.signer = crypto_factory.new_signer(private_key)
     self._identity_view_factory = MockIdentityViewFactory()
     self.permissions = {}
     self._identity_cache = IdentityCache(self._identity_view_factory)
     self.permission_verifier = \
         PermissionVerifier(
             permissions=self.permissions,
             current_root_func=self._current_root_func,
             identity_cache=self._identity_cache)
Example 27
    def test_single_key_signing(self):
        context = create_context("secp256k1")
        self.assertEqual(context.get_algorithm_name(), "secp256k1")

        factory = CryptoFactory(context)
        self.assertEqual(factory.context.get_algorithm_name(), "secp256k1")

        priv_key = Secp256k1PrivateKey.from_hex(KEY1_PRIV_HEX)
        self.assertEqual(priv_key.get_algorithm_name(), "secp256k1")
        self.assertEqual(priv_key.as_hex(), KEY1_PRIV_HEX)

        signer = factory.new_signer(priv_key)
        signature = signer.sign(MSG1.encode())
        self.assertEqual(signature, MSG1_KEY1_SIG)
Example 29
    def setUp(self):
        self.block_store = BlockStore(
            DictDatabase(indexes=BlockStore.create_index_configuration()))
        self.gossip = MockGossip()
        self.completer = Completer(self.block_store, self.gossip)
        self.completer._on_block_received = self._on_block_received
        self.completer._on_batch_received = self._on_batch_received

        context = create_context('secp256k1')
        private_key = context.new_random_private_key()
        crypto_factory = CryptoFactory(context)
        self.signer = crypto_factory.new_signer(private_key)

        self.blocks = []
        self.batches = []
Example 30
    def setUp(self):
        self.block_store = BlockStore(DictDatabase(
            indexes=BlockStore.create_index_configuration()))
        self.gossip = MockGossip()
        self.completer = Completer(self.block_store, self.gossip)
        self.completer._on_block_received = self._on_block_received
        self.completer._on_batch_received = self._on_batch_received
        self.completer._has_block = self._has_block
        self._has_block_value = True

        context = create_context('secp256k1')
        private_key = context.new_random_private_key()
        crypto_factory = CryptoFactory(context)
        self.signer = crypto_factory.new_signer(private_key)

        self.blocks = []
        self.batches = []
Example 31
def do_generate(args, batches, keys, value, bNeedSetDesp):
    context = create_context('secp256k1')
    private_key = context.new_random_private_key()
    crypto_factory = CryptoFactory(context)
    signer = crypto_factory.new_signer(private_key)

    start = time.time()
    total_txn_count = 0
    for i in range(1):
        txns = []
        for _ in range(1):
            name = random.choice(list(keys))
            txn = create_intkey_transaction(
                verb='inc',
                name=name,
                value=value,
                deps=[keys[name]] if bNeedSetDesp else [],
                signer=signer)
            total_txn_count += 1
            txns.append(txn)

        batch = create_batch(
            transactions=txns,
            signer=signer)

        batches.append(batch)

        if i % 100 == 0 and i != 0:
            stop = time.time()

            txn_count = 0
            for batch in batches[-100:]:
                txn_count += len(batch.transactions)

            fmt = 'batches {}, batch/sec: {:.2f}, txns: {}, txns/sec: {:.2f}'
            print(fmt.format(
                str(i),
                100 / (stop - start),
                str(total_txn_count),
                txn_count / (stop - start)))
            start = stop
Example 32
def gen_signer_key(key_file):
    from sawtooth_signing import create_context
    from sawtooth_signing import CryptoFactory
    from sawtooth_signing.secp256k1 import Secp256k1PrivateKey
    from sawtooth_signing import ParseError

    context = create_context('secp256k1')
    crypto_factory = CryptoFactory(context=context)
    if key_file is not None:
        try:
            with open(key_file, 'r') as infile:
                signing_key = infile.read().strip()
            private_key = Secp256k1PrivateKey.from_hex(signing_key)
        except ParseError as pe:
            raise CliException(str(pe))
        except IOError as ioe:
            raise CliException(str(ioe))
    else:
        private_key = context.new_random_private_key()
    return crypto_factory.new_signer(private_key)
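Either branch yields a ready-to-use Signer; a sketch of both call styles (the key-file path is hypothetical):

# With no key file, a random key is generated on the fly.
signer = gen_signer_key(None)

# With a key file containing a hex-encoded private key.
signer = gen_signer_key('/etc/sawtooth/keys/validator.priv')
txn_signature = signer.sign(b'payload-bytes')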
Example 33
def do_generate(args, batches, keys):
    context = create_context('secp256k1')
    private_key = context.new_random_private_key()
    crypto_factory = CryptoFactory(context)
    signer = crypto_factory.new_signer(private_key)

    start = time.time()
    total_txn_count = 0
    for i in range(0, args.count):
        txns = []
        for _ in range(0, random.randint(1, args.max_batch_size)):
            name = random.choice(list(keys))
            txn = create_intkey_transaction(
                verb=random.choice(['inc', 'dec']),
                name=name,
                value=random.randint(1, 10),
                deps=[keys[name]],
                signer=signer)
            total_txn_count += 1
            txns.append(txn)

        batch = create_batch(
            transactions=txns,
            signer=signer)

        batches.append(batch)

        if i % 100 == 0 and i != 0:
            stop = time.time()

            txn_count = 0
            for batch in batches[-100:]:
                txn_count += len(batch.transactions)

            fmt = 'batches {}, batch/sec: {:.2f}, txns: {}, txns/sec: {:.2f}'
            print(fmt.format(
                str(i),
                100 / (stop - start),
                str(total_txn_count),
                txn_count / (stop - start)))
            start = stop
Example 34
def __read_signer(signing_key):
    """Reads the given file as a hex key.

    Args:
        signing_key: The hex-encoded private key string.

    Returns:
        Signer: the signer

    Raises:
        CliException: If unable to create Secp256k1PrivateKey
    """

    try:
        private_key = Secp256k1PrivateKey.from_hex(signing_key)
    except ParseError as e:
        raise CliException('Unable to create Secp256k1PrivateKey: {}'.format(
            str(e)))

    context = create_context('secp256k1')
    crypto_factory = CryptoFactory(context)
    return crypto_factory.new_signer(private_key)
Example 35
    def test_authorization_challenge_submit_bad_signature(self):
        """
        Test that the AuthorizationChallengeSubmitHandler returns an
        AuthorizationViolation and closes the connection if the signature
        cannot be verified.
        """
        context = create_context('secp256k1')
        private_key = context.new_random_private_key()
        crypto_factory = CryptoFactory(context)
        signer = crypto_factory.new_signer(private_key)

        payload = os.urandom(10)

        signature = signer.sign(payload)

        auth_challenge_submit = AuthorizationChallengeSubmit(
            public_key="other",
            signature=signature,
            roles=[RoleType.Value("NETWORK")])

        roles = {"network": AuthorizationType.TRUST}

        network = MockNetwork(
            roles,
            connection_status={
                "connection_id": ConnectionStatus.AUTH_CHALLENGE_REQUEST
            })
        permission_verifier = MockPermissionVerifier()
        gossip = MockGossip()
        handler = AuthorizationChallengeSubmitHandler(
            network, permission_verifier, gossip, {"connection_id": payload})
        handler_status = handler.handle(
            "connection_id",
            auth_challenge_submit.SerializeToString())
        self.assertEqual(handler_status.status, HandlerStatus.RETURN_AND_CLOSE)
        self.assertEqual(
            handler_status.message_type,
            validator_pb2.Message.AUTHORIZATION_VIOLATION)
Example 36
def load_identity_signer(key_dir, key_name):
    """Loads a private key from the key directory, based on a validator's
    identity.

    Args:
        key_dir (str): The path to the key directory.
        key_name (str): The name of the key to load.

    Returns:
        Signer: the cryptographic signer for the key
    """
    key_path = os.path.join(key_dir, '{}.priv'.format(key_name))

    if not os.path.exists(key_path):
        raise LocalConfigurationError(
            "No such signing key file: {}".format(key_path))
    if not os.access(key_path, os.R_OK):
        raise LocalConfigurationError(
            "Key file is not readable: {}".format(key_path))

    LOGGER.info('Loading signing key: %s', key_path)
    try:
        with open(key_path, 'r') as key_file:
            private_key_str = key_file.read().strip()
    except IOError as e:
        raise LocalConfigurationError(
            "Could not load key file: {}".format(str(e)))

    try:
        private_key = Secp256k1PrivateKey.from_hex(private_key_str)
    except signing.ParseError as e:
        raise LocalConfigurationError(
            "Invalid key in file {}: {}".format(key_path, str(e)))

    context = signing.create_context('secp256k1')
    crypto_factory = CryptoFactory(context)
    return crypto_factory.new_signer(private_key)
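For example (the key directory is the conventional one and purely illustrative), loading a validator's own identity key might look like:

# Expects /etc/sawtooth/keys/validator.priv to hold a hex private key;
# raises LocalConfigurationError otherwise, as shown above.
signer = load_identity_signer('/etc/sawtooth/keys', 'validator')
public_key_hex = signer.get_public_key().as_hex()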
Example 37
    def test_no_validator_registry(
            self,
            mock_utils,
            mock_validator_registry_view,
            mock_consensus_state,
            mock_poet_enclave_factory,
            mock_consensus_state_store,
            mock_poet_key_state_store,
            mock_signup_info,
            mock_poet_settings_view,
            mock_block_wrapper):
        """ Test verifies that PoET Block Publisher fails
        if a validator doesn't have any signup info
        in the validator registry (the validator is not listed
        in the validator registry)
        """

        # create a mock_validator_registry_view that throws KeyError
        mock_validator_registry_view.return_value.get_validator_info. \
            side_effect = KeyError('Non-existent validator')

        # create a mock_wait_certificate that does nothing in check_valid
        mock_wait_certificate = mock.Mock()
        mock_wait_certificate.check_valid.return_value = None

        mock_utils.deserialize_wait_certificate.return_value = \
            mock_wait_certificate

        # create a mock_consensus_state that returns a mock with
        # the following settings:
        mock_state = MockConsensusState.create_mock_consensus_state()

        mock_consensus_state.consensus_state_for_block_id.return_value = \
            mock_state

        mock_poet_key_state_store.return_value = \
            _MockPoetKeyStateStore(active_key=None)

        # create mock_signup_info
        mock_signup_info.create_signup_info.return_value = \
            mock.Mock(
                poet_public_key='poet public key',
                proof_data='proof data',
                anti_sybil_id='anti-sybil ID',
                sealed_signup_data='sealed signup data')
        mock_signup_info.block_id_to_nonce.return_value = 'nonce'

        # create mock_batch_publisher
        context = create_context('secp256k1')
        private_key = context.new_random_private_key()
        crypto_factory = CryptoFactory(context)
        signer = crypto_factory.new_signer(private_key)
        mock_batch_publisher = mock.Mock(
            identity_signer=signer)

        mock_block_cache = mock.MagicMock()
        mock_state_view_factory = mock.Mock()

        # create mock_block_header with the following fields
        mock_block = mock.Mock(identifier='0123456789abcdefedcba9876543210')
        mock_block.header.signer_public_key = \
            '90834587139405781349807435098745'
        mock_block.header.previous_block_id = '2'
        mock_block.header.block_num = 1
        mock_block.header.state_root_hash = '6'
        mock_block.header.batch_ids = '4'

        # check test
        block_publisher = \
            poet_block_publisher.PoetBlockPublisher(
                block_cache=mock_block_cache,
                state_view_factory=mock_state_view_factory,
                batch_publisher=mock_batch_publisher,
                data_dir=self._temp_dir,
                config_dir=self._temp_dir,
                validator_id='validator_deadbeef')

        self.assertFalse(
            block_publisher.initialize_block(
                block_header=mock_block.header))

        # check that batch publisher was called to send out
        # the txn header and txn for the validator registry update
        self.assertTrue(mock_batch_publisher.send.called)
Example 39
 def setUp(self):
     context = create_context('secp256k1')
     private_key = context.new_random_private_key()
     crypto_factory = CryptoFactory(context)
     self.signer = crypto_factory.new_signer(private_key)
Example 40
    def test_block_publisher_doesnt_claim_readiness(
            self,
            mock_utils,
            mock_validator_registry_view,
            mock_consensus_state,
            mock_poet_enclave_factory,
            mock_consensus_state_store,
            mock_poet_key_state_store,
            mock_signup_info,
            mock_wait_time,
            mock_poet_settings_view,
            mock_block_wrapper):
        """ Test verifies that PoET Block Publisher doesn't
         claims readiness if the wait timer hasn't expired
        """

        # create a mock_validator_registry_view with
        # get_validator_info that does nothing
        mock_validator_registry_view.return_value.get_validator_info. \
            return_value = \
            ValidatorInfo(
                name='validator_001',
                id='validator_deadbeef',
                signup_info=SignUpInfo(
                    poet_public_key='00112233445566778899aabbccddeeff'))

        # create a mock_consensus_state that returns a mock with
        # the following settings:
        mock_state = MockConsensusState.create_mock_consensus_state()

        mock_consensus_state.consensus_state_for_block_id.return_value = \
            mock_state

        mock_consensus_state_store.return_value.__getitem__.return_value = \
            mock_consensus_state

        # Create mock key state
        mock_poet_key_state_store.return_value.__getitem__.return_value = \
            mock.Mock(
                sealed_signup_data='sealed signup data',
                has_been_refreshed=False)

        # create mock_signup_info
        mock_signup_info.unseal_signup_data.return_value = \
            '00112233445566778899aabbccddeeff'

        # create mock_batch_publisher
        context = create_context('secp256k1')
        private_key = context.new_random_private_key()
        crypto_factory = CryptoFactory(context)
        signer = crypto_factory.new_signer(private_key)

        mock_batch_publisher = mock.Mock(identity_signer=signer)

        mock_block_cache = mock.MagicMock()
        mock_state_view_factory = mock.Mock()

        # create mock_block_header with the following fields
        mock_block = mock.Mock(identifier='0123456789abcdefedcba9876543210')
        mock_block.header.signer_public_key = \
            '90834587139405781349807435098745'
        mock_block.header.previous_block_id = '2'
        mock_block.header.block_num = 1
        mock_block.header.state_root_hash = '6'
        mock_block.header.batch_ids = '4'

        # create a mock_wait_timer that hasn't expired yet
        my_wait_time = mock.Mock()
        my_wait_time.has_expired.return_value = False

        mock_wait_time.create_wait_timer.return_value = my_wait_time

        # create mock_poet_enclave_module
        mock_poet_enclave_module = mock.Mock()
        mock_poet_enclave_module.return_value = \
            mock_poet_enclave_factory.get_poet_enclave_module.return_value

        # check test
        block_publisher = \
            poet_block_publisher.PoetBlockPublisher(
                block_cache=mock_block_cache,
                state_view_factory=mock_state_view_factory,
                batch_publisher=mock_batch_publisher,
                data_dir=self._temp_dir,
                config_dir=self._temp_dir,
                validator_id='validator_deadbeef')

        # check initialize_block() first to set wait_timer
        self.assertTrue(
            block_publisher.initialize_block(
                block_header=mock_block.header))

        # check that block_publisher only claims readiness
        # when the wait_timer has expired
        self.assertFalse(
            block_publisher.check_publish_block(
                block_header=mock_block.header))
Example 41
def get_signer():
    context = create_context('secp256k1')
    private_key = context.new_random_private_key()
    crypto_factory = CryptoFactory(context)
    return crypto_factory.new_signer(private_key)
class SchedulerTester(object):
    """ The canonical form of the yaml is:
      -  <------------------------------------------ batch start
        state_hash: string. Optional. No default.
        - <----------------------------------------- transaction start
          inputs: list of string. Required.
            - ....
          outputs: list of string. Required.
            - ....
          addresses_to_set: list of dict. Optional.
            - string <address>: Optional bytes <value>
          addresses_to_delete: list of str. Optional
            - string <address>
          valid: boolean. Optional. Defaults to True
          dependencies: list of string. Optional. Defaults to empty list.
            - ..... string. No default. If a dependency is the same
                            string as a 'name' for another txn, that
                            txn's signature will be used for the
                            actual Transaction's dependency. If the
                            string is not a 'name' of another txn and
                            it is longer than 20 characters, it will
                            be used as if it were the actual
                            Transaction.header_signature for the
                            dependency. Otherwise, it will be
                            disregarded.
          name: string. Optional. No default.
    """

    def __init__(self, file_name):
        """

        Args:
            file_name (str): The yaml filename and path.
        """
        self._context = create_context('secp256k1')
        self._crypto_factory = CryptoFactory(self._context)
        self._yaml_file_name = file_name
        self._counter = itertools.count(0)
        self._referenced_txns_in_other_batches = {}
        self._batch_id_by_txn_id = {}
        self._txn_execution = {}

        self._batch_results = {}
        self._batches = []

        self._create_batches()

    @property
    def batch_results(self):
        """The batch results calculated from the yaml file.

        Returns:
            (dict): Computed from the yaml file, a dictionary with
                batch signature keys and BatchExecutionResult values.
        """
        return self._batch_results

    def run_scheduler(self,
                      scheduler,
                      context_manager,
                      validation_state_hash=None,
                      txns_executed_fifo=True):
        """Add all the batches to the scheduler in order and then run through
        the txns in the scheduler, calling next_transaction() after each
        transaction_execution_result is set.

        Args:
            scheduler (scheduler.Scheduler): Any implementation of the
                Scheduler abstract base class.
            context_manager (context_manager.ContextManager): The context
                manager is needed to store state based on the yaml file.
            validation_state_hash (str): Used in cases where the yaml
                represents a single block of valid batches, and the
                state hash is not in the yaml file. This state hash is added
                to the last batch in the scheduler.
            txns_executed_fifo (bool): If True, txns are executed in the
                order they were scheduled; otherwise in reverse order.

        Returns (tuple): A list of (batch signature,
            BatchExecutionResult) tuples, and a dict mapping txn ids to
            the state that should be asserted for that txn.
        """

        for i, batch in enumerate(self._batches):
            if i == len(self._batches) - 1 and \
                    validation_state_hash is not None:
                s_h = validation_state_hash
            else:
                s_h = self._batch_results[batch.header_signature].state_hash
            scheduler.add_batch(batch=batch, state_hash=s_h)

        scheduler.finalize()
        txns_to_process = deque()

        txn_context_by_txn_id = self._compute_transaction_execution_context()

        transactions_to_assert_state = {}
        while not scheduler.complete(block=False):
            stop = False
            while not stop:
                try:
                    txn_info = scheduler.next_transaction()
                except StopIteration:
                    break
                if txn_info is not None:
                    txns_to_process.append(txn_info)
                    LOGGER.debug("Transaction %s scheduled",
                                 txn_info.txn.header_signature[:16])
                else:
                    stop = True
            try:
                if txns_executed_fifo:
                    t_info = txns_to_process.popleft()
                else:
                    t_info = txns_to_process.pop()
            except IndexError:
                # No new txn was returned from next_transaction so
                # check again if complete.
                continue

            inputs, outputs = self._get_inputs_outputs(t_info.txn)

            c_id = context_manager.create_context(
                state_hash=t_info.state_hash,
                base_contexts=t_info.base_context_ids,
                inputs=inputs,
                outputs=outputs)

            t_id = t_info.txn.header_signature

            if t_id in txn_context_by_txn_id:
                state_up_to_now = txn_context_by_txn_id[t_id].state
                txn_context = txn_context_by_txn_id[t_id]
                inputs, _ = self._get_inputs_outputs(txn_context.txn)
                addresses = [addr for addr in inputs if len(addr) == 70]
                state_found = context_manager.get(
                    context_id=c_id,
                    address_list=addresses)

                LOGGER.debug("Transaction Id %s, Batch %s, Txn %s, "
                             "Context_id %s, Base Contexts %s",
                             t_id[:16],
                             txn_context.batch_num,
                             txn_context.txn_num,
                             c_id,
                             t_info.base_context_ids)

                state_to_assert = [(add, state_up_to_now.get(add))
                                   for add, _ in state_found]
                transactions_to_assert_state[t_id] = (txn_context,
                                                      state_found,
                                                      state_to_assert)

            validity, address_values, deletes = self._txn_execution[
                t_info.txn.header_signature]

            context_manager.set(
                context_id=c_id,
                address_value_list=address_values)

            context_manager.delete(
                context_id=c_id,
                address_list=deletes)
            LOGGER.debug("Transaction %s is %s",
                         t_id[:16],
                         'valid' if validity else 'invalid')
            scheduler.set_transaction_execution_result(
                txn_signature=t_info.txn.header_signature,
                is_valid=validity,
                context_id=c_id)

        batch_ids = [b.header_signature for b in self._batches]
        batch_results = [
            (b_id, scheduler.get_batch_execution_result(b_id))
            for b_id in batch_ids]

        return batch_results, transactions_to_assert_state
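    # A minimal sketch of driving run_scheduler(), assuming a scheduler
    # and a context_manager have been constructed elsewhere (the yaml
    # path below is hypothetical):
    #
    #   tester = SchedulerTester('data/simple_scheduler_test.yaml')
    #   results, state_assertions = tester.run_scheduler(
    #       scheduler=scheduler, context_manager=context_manager)
    #   for batch_id, result in results:
    #       expected = tester.batch_results[batch_id]
    #       assert result.is_valid == expected.is_valid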

    def run_scheduler_alternating(self, scheduler, context_manager,
                                  validation_state_hash=None,
                                  txns_executed_fifo=True):
        batches = deque()
        batches.extend(self._batches)

        txns_to_process = deque()

        txn_context_by_txn_id = self._compute_transaction_execution_context()

        transactions_to_assert_state = {}
        while not scheduler.complete(block=False):
            stop = False
            while not stop:
                try:
                    txn_info = scheduler.next_transaction()
                except StopIteration:
                    # Guard against txn_info being unbound if the very
                    # first call raises StopIteration.
                    txn_info = None
                    stop = True

                if txn_info is not None:
                    txns_to_process.append(txn_info)
                    LOGGER.debug("Transaction %s scheduled",
                                 txn_info.txn.header_signature[:16])
                else:
                    stop = True

            try:
                scheduler.add_batch(batches.popleft())
            except IndexError:
                scheduler.finalize()

            try:
                if txns_executed_fifo:
                    t_info = txns_to_process.popleft()
                else:
                    t_info = txns_to_process.pop()
            except IndexError:
                # No new txn was returned from next_transaction so
                # check again if complete.
                continue

            inputs, outputs = self._get_inputs_outputs(t_info.txn)

            c_id = context_manager.create_context(
                state_hash=t_info.state_hash,
                base_contexts=t_info.base_context_ids,
                inputs=inputs,
                outputs=outputs)

            t_id = t_info.txn.header_signature

            if t_id in txn_context_by_txn_id:
                state_up_to_now = txn_context_by_txn_id[t_id].state
                txn_context = txn_context_by_txn_id[t_id]
                inputs, _ = self._get_inputs_outputs(txn_context.txn)
                addresses = [addr for addr in inputs if len(addr) == 70]
                state_found = context_manager.get(
                    context_id=c_id,
                    address_list=addresses)

                LOGGER.debug("Transaction Id %s, Batch %s, Txn %s, "
                             "Context_id %s, Base Contexts %s",
                             t_id[:16],
                             txn_context.batch_num,
                             txn_context.txn_num,
                             c_id,
                             t_info.base_context_ids)

                state_to_assert = [(add, state_up_to_now.get(add))
                                   for add, _ in state_found]
                transactions_to_assert_state[t_id] = (txn_context,
                                                      state_found,
                                                      state_to_assert)

            validity, address_values, deletes = self._txn_execution[
                t_info.txn.header_signature]

            context_manager.set(
                context_id=c_id,
                address_value_list=address_values)
            context_manager.delete(
                context_id=c_id,
                address_list=deletes)
            LOGGER.debug("Transaction %s is %s",
                         t_id[:16],
                         'valid' if validity else 'invalid')
            scheduler.set_transaction_execution_result(
                txn_signature=t_info.txn.header_signature,
                is_valid=validity,
                context_id=c_id)

        batch_ids = [b.header_signature for b in self._batches]
        batch_results = [
            (b_id, scheduler.get_batch_execution_result(b_id))
            for b_id in batch_ids]

        return batch_results, transactions_to_assert_state

    def compute_state_hashes_wo_scheduler(self, base_dir):
        """Creates a state hash from the state updates from each txn in a
        valid batch.

        Args:
            base_dir (str): The directory in which to create the
                temporary lmdb database.

        Returns state_hashes (list of str): The merkle roots from state
            changes in 1 or more blocks in the yaml file.
        """

        database = NativeLmdbDatabase(
            os.path.join(base_dir, 'compute_state_hashes_wo_scheduler.lmdb'),
            indexes=MerkleDatabase.create_index_configuration(),
            _size=10 * 1024 * 1024)

        tree = MerkleDatabase(database=database)
        state_hashes = []
        updates = {}
        for batch in self._batches:
            b_id = batch.header_signature
            result = self._batch_results[b_id]
            if result.is_valid:
                for txn in batch.transactions:
                    txn_id = txn.header_signature
                    _, address_values, deletes = self._txn_execution[txn_id]
                    batch_updates = {}
                    for pair in address_values:
                        batch_updates.update({a: pair[a] for a in pair.keys()})

                    # since this is entirely serial, any overwrite
                    # of an address is expected and desirable.
                    updates.update(batch_updates)

                    for address in deletes:
                        if address in updates:
                            del updates[address]

            # This handles yaml files that have state roots in them
            if result.state_hash is not None:
                s_h = tree.update(set_items=updates, virtual=False)
                tree.set_merkle_root(merkle_root=s_h)
                state_hashes.append(s_h)
        if not state_hashes:
            state_hashes.append(tree.update(set_items=updates))
        return state_hashes
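    # Assumed usage, for illustration: the serially computed merkle
    # roots can be compared against what the scheduler reports, e.g.:
    #
    #   hashes = tester.compute_state_hashes_wo_scheduler(base_dir)
    #   # hashes[-1] is the expected final state root for the yaml file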

    def _compute_transaction_execution_context(self):
        """Compute the serial state for each txn in the yaml file up to and
        including the invalid txn in each invalid batch.

        Notes:
            The TransactionExecutionContext for a txn will contain the
            state applied serially up to that point for each valid batch and
            then for invalid batches up to the invalid txn.

        Returns:
            dict: The transaction id to the TransactionExecutionContext
        """
        transaction_contexts = {}
        state_up_to_now = {}

        for batch_num, batch in enumerate(self._batches):
            partial_batch_transaction_contexts = {}
            partial_batch_state_up_to_now = state_up_to_now.copy()
            for txn_num, txn in enumerate(batch.transactions):
                t_id = txn.header_signature
                is_valid, address_values, deletes = self._txn_execution[t_id]
                partial_batch_transaction_contexts[t_id] = \
                    TransactionExecutionContext(
                        txn=txn,
                        txn_num=txn_num + 1,
                        batch_num=batch_num + 1,
                        state=partial_batch_state_up_to_now.copy())

                for item in address_values:
                    partial_batch_state_up_to_now.update(item)
                for address in deletes:
                    if address in partial_batch_state_up_to_now:
                        partial_batch_state_up_to_now[address] = None
                if not is_valid:
                    break
            batch_id = batch.header_signature
            batch_is_valid = self._batch_results[batch_id].is_valid

            if batch_is_valid:
                transaction_contexts.update(partial_batch_transaction_contexts)
                state_up_to_now.update(partial_batch_state_up_to_now)

        return transaction_contexts

    def _address(self, add, require_full=False):
        if ':sha' not in add and ',' not in add:
            return add

        if ',' in add:
            return binascii.hexlify(bytearray(
                [int(i) for i in add.split(',')]))

        parts = add.split(':')
        if len(parts) == 3 and parts[2] == 'sha':
            # eg. 'yy:aaabbbb:sha'
            namespace = hashlib.sha512(parts[0].encode()).hexdigest()[:6]
            address = namespace + hashlib.sha512(
                parts[1].encode()).hexdigest()[:64]
        elif len(parts) == 3 and not require_full:
            # eg. 'a:sha:56'
            length = min(int(parts[2]), 70)
            address = hashlib.sha512(parts[0].encode()).hexdigest()[:length]
        elif len(parts) == 2:
            # eg. 'aaabbbb:sha'
            intermediate = parts[0]
            address = hashlib.sha512(intermediate.encode()).hexdigest()[:70]
        else:
            raise ValueError("Address specified by {} could "
                             "not be formed".format(add))
        return address
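    # Illustrative expectations for the address grammar handled above
    # (hypothetical values, not taken from a real run):
    #
    #   _address('yy:aaabbbb:sha')  # sha512('yy')[:6] + sha512('aaabbbb')[:64]
    #   _address('a:sha:56')        # sha512('a').hexdigest()[:56]
    #   _address('aaabbbb:sha')     # sha512('aaabbbb').hexdigest()[:70]
    #   _address('1,2,3')           # hexlified bytes, i.e. b'010203'
    #   _address('deadbeef')        # no ':sha' or ',': returned unchanged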

    def _get_inputs_outputs(self, txn):
        """Similarly to the TransactionExecutor, deserialize the inputs and
         outputs.

         Notes:
             The SchedulerTester has the inputs and outputs from the yaml file
             that it used to create the transaction, but it seems less
             error-prone to recreate the behavior of the TransactionExecutor.

        Args:
            txn (sawtooth_validator.protobuf.transaction_pb2.Transaction)

        Returns (tuple): (inputs, outputs)

        """

        header = transaction_pb2.TransactionHeader()
        header.ParseFromString(txn.header)

        return list(header.inputs), list(header.outputs)

    def _bytes_if_none(self, value):
        if value is None:
            value = uuid.uuid4().hex.encode()
        return value

    def _yaml_from_file(self):
        with open(self._yaml_file_name, 'r') as infile:
            test_yaml = yaml.safe_load(infile)
        return test_yaml

    def _contains_and_not_none(self, key, obj):
        return key in obj and obj[key] is not None

    def _process_batches(self, yaml_batches, signer):
        batches = []
        b_results = {}
        for batch in yaml_batches:
            batch_state_root = None
            if self._contains_and_not_none('state_hash', batch):
                batch_state_root = batch['state_hash']

            txn_processing_result = self._process_txns(
                batch=batch,
                previous_batch_results=b_results.copy(),
                signer=signer)
            txns, batch_is_valid = txn_processing_result
            batch_real = create_batch(
                transactions=txns,
                signer=signer)
            for txn in txns:
                txn_id = txn.header_signature
                batch_id = batch_real.header_signature
                self._batch_id_by_txn_id[txn_id] = batch_id

            b_results[batch_real.header_signature] = BatchExecutionResult(
                is_valid=batch_is_valid,
                state_hash=batch_state_root)
            batches.append(batch_real)
        return batches, b_results

    def _dependencies_are_valid(self, dependencies, previous_batch_results):
        for dep in dependencies:
            if dep in self._batch_id_by_txn_id:
                batch_id = self._batch_id_by_txn_id[dep]
                dep_result = previous_batch_results[batch_id]
                if not dep_result.is_valid:
                    return False
        return True

    def _process_txns(self, batch, previous_batch_results, signer):
        txns = []
        referenced_txns = {}
        execution = {}
        batch_is_valid = True
        for transaction in batch:
            is_valid = True
            addresses_to_set = []
            addresses_to_delete = []
            inputs = transaction['inputs']
            outputs = transaction['outputs']
            inputs_real = [self._address(a) for a in inputs]
            outputs_real = [self._address(a) for a in outputs]
            if self._contains_and_not_none('addresses_to_set', transaction):
                addresses_to_set = [{
                    self._address(a, require_full=True): self._bytes_if_none(
                        d[a])
                    for a in d
                } for d in transaction['addresses_to_set']]
            if self._contains_and_not_none('addresses_to_delete', transaction):
                addresses_to_delete = [
                    self._address(a, require_full=True)
                    for a in transaction['addresses_to_delete']
                ]

            if self._contains_and_not_none('dependencies', transaction):
                if any([
                        a not in self._referenced_txns_in_other_batches
                        and len(a) <= 20 for a in transaction['dependencies']
                ]):
                    # This txn has a dependency on a txn signature that
                    # is not known about, so the batch cannot be built.
                    return None

                dependencies = [
                    self._referenced_txns_in_other_batches[a]
                    if a in self._referenced_txns_in_other_batches else a
                    for a in transaction['dependencies']
                ]
                dependencies = [a for a in dependencies if len(a) > 20]
            else:
                dependencies = []

            deps_valid = self._dependencies_are_valid(
                dependencies,
                previous_batch_results)

            if self._contains_and_not_none('valid', transaction):
                is_valid = bool(transaction['valid'])

            if not is_valid or not deps_valid:
                batch_is_valid = False

            txn, _ = create_transaction(
                payload=uuid.uuid4().hex.encode(),
                dependencies=dependencies,
                inputs=inputs_real,
                outputs=outputs_real,
                signer=signer)

            if self._contains_and_not_none('name', transaction):
                referenced_txns[transaction['name']] = txn.header_signature

            execution[txn.header_signature] = (is_valid,
                                               addresses_to_set,
                                               addresses_to_delete)
            txns.append(txn)

        self._txn_execution.update(execution)
        self._referenced_txns_in_other_batches.update(referenced_txns)
        return txns, batch_is_valid
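    # Dependency resolution in _process_txns, illustrated with
    # hypothetical values:
    #   - 'txn_one' (a known 'name')    -> replaced by that txn's
    #                                      header_signature
    #   - a 128-char hex signature      -> kept verbatim (longer than
    #                                      20 characters)
    #   - 'unknown_name' (<= 20 chars)  -> unknown, so _process_txns
    #                                      returns None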

    def _create_batches(self):
        test_yaml = self._yaml_from_file()
        private_key = self._context.new_random_private_key()
        signer = self._crypto_factory.new_signer(private_key)

        batches, batch_results = self._process_batches(
            yaml_batches=test_yaml,
            signer=signer)

        self._batch_results = batch_results
        self._batches = batches

    def test_signup_info_not_committed_within_allowed_delay(
            self,
            mock_utils,
            mock_validator_registry_view,
            mock_consensus_state,
            mock_poet_enclave_factory,
            mock_consensus_state_store,
            mock_poet_key_state_store,
            mock_signup_info,
            mock_poet_settings_view,
            mock_block_wrapper):
        """ Test verifies that PoET Block Publisher fails if
        a validator's signup info was not committed to
        the block chain within the allowed configured delay
        """

        # create a mock_validator_registry_view with
        # get_validator_info that does nothing
        mock_validator_registry_view.return_value.get_validator_info. \
            return_value = \
            ValidatorInfo(
                name='validator_001',
                id='validator_deadbeef',
                signup_info=SignUpInfo(
                    poet_public_key='00112233445566778899aabbccddeeff',
                    nonce='nonce'))

        # create a mock_wait_certificate that does nothing in check_valid
        mock_wait_certificate = mock.Mock()
        mock_wait_certificate.check_valid.return_value = None

        mock_utils.deserialize_wait_certificate.return_value = \
            mock_wait_certificate

        # create a mock_consensus_state that returns a mock with
        # the following settings:
        mock_state = MockConsensusState.create_mock_consensus_state(
            committed_too_late=True)
        mock_consensus_state.consensus_state_for_block_id.return_value = \
            mock_state

        mock_consensus_state_store.return_value.__getitem__.return_value = \
            mock_consensus_state

        # Create mock key state
        mock_poet_key_state_store.return_value.__getitem__.return_value = \
            mock.Mock(
                sealed_signup_data='sealed signup data',
                has_been_refreshed=False)

        # create mock_signup_info
        mock_signup_info.create_signup_info.return_value = \
            mock.Mock(
                poet_public_key='poet public key',
                proof_data='proof data',
                anti_sybil_id='anti-sybil ID',
                sealed_signup_data='sealed signup data')
        mock_signup_info.block_id_to_nonce.return_value = 'nonce'
        mock_signup_info.unseal_signup_data.return_value = \
            '00112233445566778899aabbccddeeff'

        # create mock_batch_publisher
        context = create_context('secp256k1')
        private_key = context.new_random_private_key()
        crypto_factory = CryptoFactory(context)
        signer = crypto_factory.new_signer(private_key)

        mock_batch_publisher = mock.Mock(identity_signer=signer)

        mock_block_cache = mock.MagicMock()
        mock_state_view_factory = mock.Mock()

        # create mock_block_header with the following fields
        mock_block = mock.Mock(identifier='0123456789abcdefedcba9876543210')
        mock_block.header.signer_public_key = \
            '90834587139405781349807435098745'
        mock_block.header.previous_block_id = '2'
        mock_block.header.block_num = 1
        mock_block.header.state_root_hash = '6'
        mock_block.header.batch_ids = '4'

        # check test
        with mock.patch('sawtooth_poet.poet_consensus.poet_block_publisher.'
                        'LOGGER') as mock_logger:
            block_publisher = \
                poet_block_publisher.PoetBlockPublisher(
                    block_cache=mock_block_cache,
                    state_view_factory=mock_state_view_factory,
                    batch_publisher=mock_batch_publisher,
                    data_dir=self._temp_dir,
                    config_dir=self._temp_dir,
                    validator_id='validator_deadbeef')

            self.assertFalse(
                block_publisher.initialize_block(
                    block_header=mock_block.header))

            # Could be a hack, but verify that the appropriate log message is
            # generated - so we at least have some faith that the failure was
            # because of what we are testing and not something else.  I know
            # that this is fragile if the log message is changed, so would
            # accept any suggestions on a better way to verify that the
            # function fails for the reason we expect.
            self.assertTrue(
                any(
                    'Validator signup information not committed in a timely '
                    'manner.' in call[0][0] for call in
                    mock_logger.info.call_args_list))

            # check that create.signup_info() was called to create
            # the validator registry payload with new set of keys
            self.assertTrue(mock_signup_info.create_signup_info.called)
    def test_z_policy(
            self,
            mock_utils,
            mock_validator_registry_view,
            mock_consensus_state,
            mock_poet_enclave_factory,
            mock_consensus_state_store,
            mock_poet_key_state_store,
            mock_signup_info,
            mock_poet_settings_view,
            mock_block_wrapper):
        """ Z Policy: Test verifies that PoET Block Publisher fails
        if a validator attempts to claim more blocks frequently than is allowed
        """

        # create a mock_validator_registry_view with
        # get_validator_info that does nothing
        mock_validator_registry_view.return_value.get_validator_info. \
            return_value = \
            ValidatorInfo(
                name='validator_001',
                id='validator_deadbeef',
                signup_info=SignUpInfo(
                    poet_public_key='00112233445566778899aabbccddeeff'))

        # create a mock_wait_certificate that does nothing in check_valid
        mock_wait_certificate = mock.Mock()
        mock_wait_certificate.check_valid.return_value = None

        mock_utils.deserialize_wait_certificate.return_value = \
            mock_wait_certificate

        # create a mock_consensus_state that returns a mock with
        # the following settings:
        mock_state = MockConsensusState.create_mock_consensus_state(
            claiming_too_frequently=True)

        mock_consensus_state.consensus_state_for_block_id.return_value = \
            mock_state

        mock_consensus_state_store.return_value.__getitem__.return_value = \
            mock_consensus_state

        # Create mock key state
        mock_poet_key_state_store.return_value.__getitem__.return_value = \
            mock.Mock(
                sealed_signup_data='sealed signup data',
                has_been_refreshed=False)

        # create mock_signup_info
        mock_signup_info.unseal_signup_data.return_value = \
            '00112233445566778899aabbccddeeff'

        # create mock_batch_publisher
        context = create_context('secp256k1')
        private_key = context.new_random_private_key()
        crypto_factory = CryptoFactory(context)
        signer = crypto_factory.new_signer(private_key)

        mock_batch_publisher = mock.Mock(identity_signer=signer)

        mock_block_cache = mock.MagicMock()
        mock_state_view_factory = mock.Mock()

        # create mock_block_header with the following fields
        mock_block = mock.Mock(identifier='0123456789abcdefedcba9876543210')
        mock_block.header.signer_public_key = \
            '90834587139405781349807435098745'
        mock_block.header.previous_block_id = '2'
        mock_block.header.block_num = 1
        mock_block.header.state_root_hash = '6'
        mock_block.header.batch_ids = '4'

        # check test
        with mock.patch('sawtooth_poet.poet_consensus.poet_block_publisher.'
                        'LOGGER') as mock_logger:
            block_publisher = \
                poet_block_publisher.PoetBlockPublisher(
                    block_cache=mock_block_cache,
                    state_view_factory=mock_state_view_factory,
                    batch_publisher=mock_batch_publisher,
                    data_dir=self._temp_dir,
                    config_dir=self._temp_dir,
                    validator_id='validator_deadbeef')

            self.assertFalse(
                block_publisher.initialize_block(
                    block_header=mock_block.header))

            # Could be a hack, but verify that the appropriate log message is
            # generated - so we at least have some faith that the failure was
            # because of what we are testing and not something else.  I know
            # that this is fragile if the log message is changed, so would
            # accept any suggestions on a better way to verify that the
            # function fails for the reason we expect.

            (message, *_), _ = mock_logger.info.call_args
            self.assertTrue('is claiming blocks too '
                            'frequently' in message)


class SchedulerTester(object):
    """ The canonical form of the yaml is:
      -  <------------------------------------------ batch start
        state_hash: string. Optional. No default.
        - <----------------------------------------- transaction start
          inputs: list of string. Required.
            - ....
          outputs: list of string. Required.
            - ....
          addresses_to_set: list of dict. Optional.
            - string <address>: Optional bytes <value>
          addresses_to_delete: list of str. Optional
            - string <address>
          valid: boolean. Optional. Defaults to True
          dependencies: list of string. Optional. Defaults to empty list.
            - ..... string. No default. If a dependency is the same
                            string as a 'name' for another txn, that
                            txn's signature will be used for the
                            actual Transaction's dependency. If the
                            string is not a 'name' of another txn and
                            it is longer than 20 characters, it will
                            be used as if it were the actual
                            Transaction.header_signature for the
                            dependency. Otherwise, it will be
                            disregarded.
          name: string. Optional. No default.
    """
    def __init__(self, file_name):
        """Parse the yaml file and pre-compute the batches it describes.

        Args:
            file_name (str): The yaml filename and path.
        """
        self._context = create_context('secp256k1')
        self._crypto_factory = CryptoFactory(self._context)
        self._yaml_file_name = file_name
        self._counter = itertools.count(0)
        self._referenced_txns_in_other_batches = {}
        self._batch_id_by_txn_id = {}
        self._txn_execution = {}

        self._batch_results = {}
        self._batches = []

        self._create_batches()

    @property
    def batch_results(self):
        """The batch results calculated from the yaml file.

        Returns:
            (dict): Computed from the yaml file, a dictionary with
                batch signature keys and BatchExecutionResult values.
        """
        return self._batch_results

    def run_scheduler(self,
                      scheduler,
                      context_manager,
                      validation_state_hash=None,
                      txns_executed_fifo=True):
        """Add all the batches to the scheduler in order and then run through
        the txns in the scheduler, calling next_transaction() after each
        transaction_execution_result is set.

        Args:
            scheduler (scheduler.Scheduler): Any implementation of the
                Scheduler abstract base class.
            context_manager (context_manager.ContextManager): The context
                manager is needed to store state based on the yaml file.
            validation_state_hash (str): Used in cases where the yaml
                represents a single block of valid batches, and the
                state hash is not in the yaml file. This state hash is added
                to the last batch in the scheduler.
            txns_executed_fifo (bool): If True, txns are executed in the
                order they were scheduled; otherwise in reverse order.

        Returns (tuple): A list of (batch signature,
            BatchExecutionResult) tuples, and a dict mapping txn ids to
            the state that should be asserted for that txn.
        """

        for i, batch in enumerate(self._batches):
            if i == len(self._batches) - 1 and \
                    validation_state_hash is not None:
                s_h = validation_state_hash
            else:
                s_h = self._batch_results[batch.header_signature].state_hash
            scheduler.add_batch(batch=batch, state_hash=s_h)

        scheduler.finalize()
        txns_to_process = deque()

        txn_context_by_txn_id = self._compute_transaction_execution_context()

        transactions_to_assert_state = {}
        while not scheduler.complete(block=False):
            stop = False
            while not stop:
                try:
                    txn_info = scheduler.next_transaction()
                except StopIteration:
                    break
                if txn_info is not None:
                    txns_to_process.append(txn_info)
                    LOGGER.debug("Transaction %s scheduled",
                                 txn_info.txn.header_signature[:16])
                else:
                    stop = True
            try:
                if txns_executed_fifo:
                    t_info = txns_to_process.popleft()
                else:
                    t_info = txns_to_process.pop()
            except IndexError:
                # No new txn was returned from next_transaction so
                # check again if complete.
                continue

            inputs, outputs = self._get_inputs_outputs(t_info.txn)

            c_id = context_manager.create_context(
                state_hash=t_info.state_hash,
                base_contexts=t_info.base_context_ids,
                inputs=inputs,
                outputs=outputs)

            t_id = t_info.txn.header_signature

            if t_id in txn_context_by_txn_id:
                state_up_to_now = txn_context_by_txn_id[t_id].state
                txn_context = txn_context_by_txn_id[t_id]
                inputs, _ = self._get_inputs_outputs(txn_context.txn)
                addresses = [addr for addr in inputs if len(addr) == 70]
                state_found = context_manager.get(context_id=c_id,
                                                  address_list=addresses)

                LOGGER.debug(
                    "Transaction Id %s, Batch %s, Txn %s, "
                    "Context_id %s, Base Contexts %s", t_id[:16],
                    txn_context.batch_num, txn_context.txn_num, c_id,
                    t_info.base_context_ids)

                state_to_assert = [(add, state_up_to_now.get(add))
                                   for add, _ in state_found]
                transactions_to_assert_state[t_id] = (txn_context, state_found,
                                                      state_to_assert)

            validity, address_values, deletes = self._txn_execution[
                t_info.txn.header_signature]

            context_manager.set(context_id=c_id,
                                address_value_list=address_values)

            context_manager.delete(context_id=c_id, address_list=deletes)
            LOGGER.debug("Transaction %s is %s", t_id[:16],
                         'valid' if validity else 'invalid')
            scheduler.set_transaction_execution_result(
                txn_signature=t_info.txn.header_signature,
                is_valid=validity,
                context_id=c_id)

        batch_ids = [b.header_signature for b in self._batches]
        batch_results = [(b_id, scheduler.get_batch_execution_result(b_id))
                         for b_id in batch_ids]

        return batch_results, transactions_to_assert_state

    def run_scheduler_alternating(self,
                                  scheduler,
                                  context_manager,
                                  validation_state_hash=None,
                                  txns_executed_fifo=True):
        batches = deque()
        batches.extend(self._batches)

        txns_to_process = deque()

        txn_context_by_txn_id = self._compute_transaction_execution_context()

        transactions_to_assert_state = {}
        while not scheduler.complete(block=False):
            stop = False
            while not stop:
                try:
                    txn_info = scheduler.next_transaction()
                except StopIteration:
                    # Guard against txn_info being unbound if the very
                    # first call raises StopIteration.
                    txn_info = None
                    stop = True

                if txn_info is not None:
                    txns_to_process.append(txn_info)
                    LOGGER.debug("Transaction %s scheduled",
                                 txn_info.txn.header_signature[:16])
                else:
                    stop = True

            try:
                scheduler.add_batch(batches.popleft())
            except IndexError:
                scheduler.finalize()

            try:
                if txns_executed_fifo:
                    t_info = txns_to_process.popleft()
                else:
                    t_info = txns_to_process.pop()
            except IndexError:
                # No new txn was returned from next_transaction so
                # check again if complete.
                continue

            inputs, outputs = self._get_inputs_outputs(t_info.txn)

            c_id = context_manager.create_context(
                state_hash=t_info.state_hash,
                base_contexts=t_info.base_context_ids,
                inputs=inputs,
                outputs=outputs)

            t_id = t_info.txn.header_signature

            if t_id in txn_context_by_txn_id:
                state_up_to_now = txn_context_by_txn_id[t_id].state
                txn_context = txn_context_by_txn_id[t_id]
                inputs, _ = self._get_inputs_outputs(txn_context.txn)
                addresses = [addr for addr in inputs if len(addr) == 70]
                state_found = context_manager.get(context_id=c_id,
                                                  address_list=addresses)

                LOGGER.debug(
                    "Transaction Id %s, Batch %s, Txn %s, "
                    "Context_id %s, Base Contexts %s", t_id[:16],
                    txn_context.batch_num, txn_context.txn_num, c_id,
                    t_info.base_context_ids)

                state_to_assert = [(add, state_up_to_now.get(add))
                                   for add, _ in state_found]
                transactions_to_assert_state[t_id] = (txn_context, state_found,
                                                      state_to_assert)

            validity, address_values, deletes = self._txn_execution[
                t_info.txn.header_signature]

            context_manager.set(context_id=c_id,
                                address_value_list=address_values)
            context_manager.delete(context_id=c_id, address_list=deletes)
            LOGGER.debug("Transaction %s is %s", t_id[:16],
                         'valid' if validity else 'invalid')
            scheduler.set_transaction_execution_result(
                txn_signature=t_info.txn.header_signature,
                is_valid=validity,
                context_id=c_id)

        batch_ids = [b.header_signature for b in self._batches]
        batch_results = [(b_id, scheduler.get_batch_execution_result(b_id))
                         for b_id in batch_ids]

        return batch_results, transactions_to_assert_state

    def compute_state_hashes_wo_scheduler(self):
        """Creates a state hash from the state updates from each txn in a
        valid batch.

        Returns state_hashes (list of str): The merkle roots from state
            changes in 1 or more blocks in the yaml file.

        """

        tree = MerkleDatabase(database=DictDatabase())
        state_hashes = []
        updates = {}
        for batch in self._batches:
            b_id = batch.header_signature
            result = self._batch_results[b_id]
            if result.is_valid:
                for txn in batch.transactions:
                    txn_id = txn.header_signature
                    _, address_values, deletes = self._txn_execution[txn_id]
                    batch_updates = {}
                    for pair in address_values:
                        batch_updates.update({a: pair[a] for a in pair.keys()})

                    # since this is entirely serial, any overwrite
                    # of an address is expected and desirable.
                    updates.update(batch_updates)

                    for address in deletes:
                        if address in updates:
                            del updates[address]

            # This handles yaml files that have state roots in them
            if result.state_hash is not None:
                s_h = tree.update(set_items=updates, virtual=False)
                tree.set_merkle_root(merkle_root=s_h)
                state_hashes.append(s_h)
        if not state_hashes:
            state_hashes.append(tree.update(set_items=updates))
        return state_hashes

    def _compute_transaction_execution_context(self):
        """Compute the serial state for each txn in the yaml file up to and
        including the invalid txn in each invalid batch.

        Notes:
            The TransactionExecutionContext for a txn will contain the
            state applied serially up to that point for each valid batch and
            then for invalid batches up to the invalid txn.

        Returns:
            dict: The transaction id to the TransactionExecutionContext
        """
        transaction_contexts = {}
        state_up_to_now = {}

        for batch_num, batch in enumerate(self._batches):
            partial_batch_transaction_contexts = {}
            partial_batch_state_up_to_now = state_up_to_now.copy()
            for txn_num, txn in enumerate(batch.transactions):
                t_id = txn.header_signature
                is_valid, address_values, deletes = self._txn_execution[t_id]
                partial_batch_transaction_contexts[t_id] = \
                    TransactionExecutionContext(
                        txn=txn,
                        txn_num=txn_num + 1,
                        batch_num=batch_num + 1,
                        state=partial_batch_state_up_to_now.copy())

                for item in address_values:
                    partial_batch_state_up_to_now.update(item)
                for address in deletes:
                    if address in partial_batch_state_up_to_now:
                        partial_batch_state_up_to_now[address] = None
                if not is_valid:
                    break
            batch_id = batch.header_signature
            batch_is_valid = self._batch_results[batch_id].is_valid

            if batch_is_valid:
                transaction_contexts.update(partial_batch_transaction_contexts)
                state_up_to_now.update(partial_batch_state_up_to_now)

        return transaction_contexts

    def _address(self, add, require_full=False):
        if ':sha' not in add and ',' not in add:
            return add

        if ',' in add:
            return binascii.hexlify(bytearray([int(i)
                                               for i in add.split(',')]))

        parts = add.split(':')
        if len(parts) == 3 and parts[2] == 'sha':
            # eg. 'yy:aaabbbb:sha'
            namespace = hashlib.sha512(parts[0].encode()).hexdigest()[:6]
            address = namespace + hashlib.sha512(
                parts[1].encode()).hexdigest()[:64]
        elif len(parts) == 3 and not require_full:
            # eg. 'a:sha:56'
            length = min(int(parts[2]), 70)
            address = hashlib.sha512(parts[0].encode()).hexdigest()[:length]
        elif len(parts) == 2:
            # eg. 'aaabbbb:sha'
            intermediate = parts[0]
            address = hashlib.sha512(intermediate.encode()).hexdigest()[:70]
        else:
            raise ValueError("Address specified by {} could "
                             "not be formed".format(add))
        return address

    def _get_inputs_outputs(self, txn):
        """Similarly to the TransactionExecutor, deserialize the inputs and
         outputs.

         Notes:
             The SchedulerTester has the inputs and outputs from the yaml file
             that it used to create the transaction, but it seems less
             error-prone to recreate the behavior of the TransactionExecutor.

        Args:
            txn (sawtooth_validator.protobuf.transaction_pb2.Transaction)

        Returns (tuple): (inputs, outputs)

        """

        header = transaction_pb2.TransactionHeader()
        header.ParseFromString(txn.header)

        return list(header.inputs), list(header.outputs)

    def _bytes_if_none(self, value):
        if value is None:
            value = uuid.uuid4().hex.encode()
        return value

    def _yaml_from_file(self):
        with open(self._yaml_file_name, 'r') as infile:
            test_yaml = yaml.safe_load(infile)
        return test_yaml

    def _contains_and_not_none(self, key, obj):
        return key in obj and obj[key] is not None

    def _process_batches(self, yaml_batches, signer):
        batches = []
        b_results = {}
        for batch in yaml_batches:
            batch_state_root = None
            if self._contains_and_not_none('state_hash', batch):
                batch_state_root = batch['state_hash']

            txn_processing_result = self._process_txns(
                batch=batch,
                previous_batch_results=b_results.copy(),
                signer=signer)
            txns, batch_is_valid = txn_processing_result
            batch_real = create_batch(transactions=txns, signer=signer)
            for txn in txns:
                txn_id = txn.header_signature
                batch_id = batch_real.header_signature
                self._batch_id_by_txn_id[txn_id] = batch_id

            b_results[batch_real.header_signature] = BatchExecutionResult(
                is_valid=batch_is_valid, state_hash=batch_state_root)
            batches.append(batch_real)
        return batches, b_results

    def _dependencies_are_valid(self, dependencies, previous_batch_results):
        for dep in dependencies:
            if dep in self._batch_id_by_txn_id:
                batch_id = self._batch_id_by_txn_id[dep]
                dep_result = previous_batch_results[batch_id]
                if not dep_result.is_valid:
                    return False
        return True

    def _process_txns(self, batch, previous_batch_results, signer):
        txns = []
        referenced_txns = {}
        execution = {}
        batch_is_valid = True
        for transaction in batch:
            is_valid = True
            addresses_to_set = []
            addresses_to_delete = []
            inputs = transaction['inputs']
            outputs = transaction['outputs']
            inputs_real = [self._address(a) for a in inputs]
            outputs_real = [self._address(a) for a in outputs]
            if self._contains_and_not_none('addresses_to_set', transaction):
                addresses_to_set = [{
                    self._address(a, require_full=True):
                    self._bytes_if_none(d[a])
                    for a in d
                } for d in transaction['addresses_to_set']]
            if self._contains_and_not_none('addresses_to_delete', transaction):
                addresses_to_delete = [
                    self._address(a, require_full=True)
                    for a in transaction['addresses_to_delete']
                ]

            if self._contains_and_not_none('dependencies', transaction):
                if any([
                        a not in self._referenced_txns_in_other_batches
                        and len(a) <= 20 for a in transaction['dependencies']
                ]):
                    # This txn has a dependency on a txn signature that
                    # is not known about, so the batch cannot be built.
                    return None

                dependencies = [
                    self._referenced_txns_in_other_batches[a]
                    if a in self._referenced_txns_in_other_batches else a
                    for a in transaction['dependencies']
                ]
                dependencies = [a for a in dependencies if len(a) > 20]
            else:
                dependencies = []

            deps_valid = self._dependencies_are_valid(dependencies,
                                                      previous_batch_results)

            if self._contains_and_not_none('valid', transaction):
                is_valid = bool(transaction['valid'])

            if not is_valid or not deps_valid:
                batch_is_valid = False

            txn, _ = create_transaction(payload=uuid.uuid4().hex.encode(),
                                        dependencies=dependencies,
                                        inputs=inputs_real,
                                        outputs=outputs_real,
                                        signer=signer)

            if self._contains_and_not_none('name', transaction):
                referenced_txns[transaction['name']] = txn.header_signature

            execution[txn.header_signature] = (is_valid, addresses_to_set,
                                               addresses_to_delete)
            txns.append(txn)

        self._txn_execution.update(execution)
        self._referenced_txns_in_other_batches.update(referenced_txns)
        return txns, batch_is_valid

    def _create_batches(self):
        test_yaml = self._yaml_from_file()
        private_key = self._context.new_random_private_key()
        signer = self._crypto_factory.new_signer(private_key)

        batches, batch_results = self._process_batches(yaml_batches=test_yaml,
                                                       signer=signer)

        self._batch_results = batch_results
        self._batches = batches
Example n. 47
    def setUp(self):
        self._temp_dir = tempfile.mkdtemp()
        context = create_context('secp256k1')
        private_key = context.new_random_private_key()
        crypto_factory = CryptoFactory(context)
        self._signer = crypto_factory.new_signer(private_key)

    def test_block_publisher_finalize_block(
            self,
            mock_utils,
            mock_validator_registry_view,
            mock_consensus_state,
            mock_poet_enclave_factory,
            mock_consensus_state_store,
            mock_poet_key_state_store,
            mock_signup_info,
            mock_wait_certificate,
            mock_poet_settings_view,
            mock_block_wrapper):
        """ Test verifies that PoET Block Publisher finalizes the block,
            meaning that the candidate block is good and should be generated.
        """

        # create a mock_validator_registry_view with
        # get_validator_info that does nothing
        mock_validator_registry_view.return_value.get_validator_info. \
            return_value = \
            ValidatorInfo(
                name='validator_001',
                id='validator_deadbeef',
                signup_info=SignUpInfo(
                    poet_public_key='00112233445566778899aabbccddeeff'))

        # create a mock_wait_certificate that does nothing in check_valid
        my_wait_certificate = mock.Mock()
        my_wait_certificate.check_valid.return_value = None
        mock_wait_certificate.create_wait_certificate.return_value = \
            my_wait_certificate

        # create a mock_consensus_state that returns a mock with
        # the following settings:
        mock_state = MockConsensusState().create_mock_consensus_state()

        mock_consensus_state.consensus_state_for_block_id.return_value = \
            mock_state

        # create mock_batch_publisher
        context = create_context('secp256k1')
        private_key = context.new_random_private_key()
        crypto_factory = CryptoFactory(context)
        signer = crypto_factory.new_signer(private_key)
        mock_batch_publisher = mock.Mock(
            identity_signer=signer)

        mock_block_cache = mock.MagicMock()
        mock_state_view_factory = mock.Mock()

        # create mock_block_header with the following fields
        mock_block = mock.Mock(identifier='0123456789abcdefedcba9876543210')
        mock_block.header.signer_public_key = \
            '90834587139405781349807435098745'
        mock_block.header.previous_block_id = '2'
        mock_block.header.block_num = 1
        mock_block.header.state_root_hash = '6'
        mock_block.header.batch_ids = '4'

        # check test
        block_publisher = \
            poet_block_publisher.PoetBlockPublisher(
                block_cache=mock_block_cache,
                state_view_factory=mock_state_view_factory,
                batch_publisher=mock_batch_publisher,
                data_dir=self._temp_dir,
                config_dir=self._temp_dir,
                validator_id='validator_deadbeef')

        with mock.patch('sawtooth_poet.poet_consensus.'
                        'poet_block_publisher.json') as _:
            self.assertTrue(block_publisher.finalize_block(
                block_header=mock_block.header))
    def test_block_publisher_doesnt_finalize_block(
            self,
            mock_utils,
            mock_validator_registry_view,
            mock_consensus_state,
            mock_poet_enclave_factory,
            mock_consensus_state_store,
            mock_poet_key_state_store,
            mock_signup_info,
            mock_wait_certificate,
            mock_poet_settings_view,
            mock_block_wrapper):
        """ Test verifies that PoET Block Publisher doesn't finalize
            a candidate block that doesn't have a valid wait certificate.
        """

        # create a mock_validator_registry_view with
        # get_validator_info that does nothing
        mock_validator_registry_view.return_value.get_validator_info. \
            return_value = \
            ValidatorInfo(
                name='validator_001',
                id='validator_deadbeef',
                signup_info=SignUpInfo(
                    poet_public_key='00112233445566778899aabbccddeeff'))

        # create a mock_wait_certificate that pretends to fail
        mock_wait_certificate.create_wait_certificate.side_effect = \
            ValueError('Unit test fake failure')

        # create a mock_consensus_state that returns a mock with
        # the following settings:
        mock_state = MockConsensusState().create_mock_consensus_state()

        mock_consensus_state.consensus_state_for_block_id.return_value = \
            mock_state

        # create mock_batch_publisher
        context = create_context('secp256k1')
        private_key = context.new_random_private_key()
        crypto_factory = CryptoFactory(context)
        signer = crypto_factory.new_signer(private_key)
        mock_batch_publisher = mock.Mock(
            identity_signer=signer)

        mock_block_cache = mock.MagicMock()
        mock_state_view_factory = mock.Mock()

        # create mock_block_header with the following fields
        mock_block = mock.Mock(identifier='0123456789abcdefedcba9876543210')
        mock_block.header.signer_public_key = \
            '90834587139405781349807435098745'
        mock_block.header.previous_block_id = '2'
        mock_block.header.block_num = 1
        mock_block.header.state_root_hash = '6'
        mock_block.header.batch_ids = '4'

        # check test
        with mock.patch('sawtooth_poet.poet_consensus.poet_block_publisher.'
                        'LOGGER') as mock_logger:
            block_publisher = \
                poet_block_publisher.PoetBlockPublisher(
                    block_cache=mock_block_cache,
                    state_view_factory=mock_state_view_factory,
                    batch_publisher=mock_batch_publisher,
                    data_dir=self._temp_dir,
                    config_dir=self._temp_dir,
                    validator_id='validator_deadbeef')

            with mock.patch('sawtooth_poet.poet_consensus.'
                            'poet_block_publisher.json') as _:
                self.assertFalse(
                    block_publisher.finalize_block(
                        block_header=mock_block.header))

            # Could be a hack, but verify that the appropriate log message is
            # generated - so we at least have some faith that the failure was
            # because of what we are testing and not something else.  I know
            # that this is fragile if the log message is changed, so would
            # accept any suggestions on a better way to verify that the
            # function fails for the reason we expect.

            (message, *_), _ = mock_logger.error.call_args
            self.assertTrue('Failed to create wait certificate: '
                            in message)
Example n. 50
    def setUp(self):
        self._temp_dir = tempfile.mkdtemp()
        context = create_context('secp256k1')
        private_key = context.new_random_private_key()
        crypto_factory = CryptoFactory(context)
        self._signer = crypto_factory.new_signer(private_key)

    def test_block_publisher_doesnt_claim_readiness(
            self,
            mock_utils,
            mock_validator_registry_view,
            mock_consensus_state,
            mock_poet_enclave_factory,
            mock_consensus_state_store,
            mock_poet_key_state_store,
            mock_signup_info,
            mock_wait_time,
            mock_poet_settings_view,
            mock_block_wrapper):
        """ Test verifies that PoET Block Publisher doesn't
         claims readiness if the wait timer hasn't expired
        """

        # create a mock_validator_registry_view whose get_validator_info
        # returns a canned ValidatorInfo
        mock_validator_registry_view.return_value.get_validator_info. \
            return_value = \
            ValidatorInfo(
                name='validator_001',
                id='validator_deadbeef',
                signup_info=SignUpInfo(
                    poet_public_key='00112233445566778899aabbccddeeff'))

        # create a mock consensus state using the MockConsensusState defaults
        mock_state = MockConsensusState.create_mock_consensus_state()

        mock_consensus_state.consensus_state_for_block_id.return_value = \
            mock_state

        mock_consensus_state_store.return_value.__getitem__.return_value = \
            mock_consensus_state
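        # (the instantiated store is a MagicMock, so __getitem__ is
        # configurable: any consensus_state_store[block_id] lookup now
        # returns the mock configured above)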

        # Create mock key state
        mock_poet_key_state_store.return_value.__getitem__.return_value = \
            mock.Mock(
                sealed_signup_data='sealed signup data',
                has_been_refreshed=False)

        # create mock_signup_info
        mock_signup_info.unseal_signup_data.return_value = \
            '00112233445566778899aabbccddeeff'
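        # (the unsealed value matches the poet_public_key registered above;
        # presumably the publisher checks they agree before reusing the
        # key state)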

        # create mock_batch_publisher
        context = create_context('secp256k1')
        private_key = context.new_random_private_key()
        crypto_factory = CryptoFactory(context)
        signer = crypto_factory.new_signer(private_key)

        mock_batch_publisher = mock.Mock(identity_signer=signer)

        mock_block_cache = mock.MagicMock()
        mock_state_view_factory = mock.Mock()

        # create a mock block whose header has the following fields
        mock_block = mock.Mock(identifier='0123456789abcdefedcba9876543210')
        mock_block.header.signer_public_key = \
            '90834587139405781349807435098745'
        mock_block.header.previous_block_id = '2'
        mock_block.header.block_num = 1
        mock_block.header.state_root_hash = '6'
        mock_block.header.batch_ids = '4'

        # create a mock_wait_timer that hasn't expired yet
        my_wait_time = mock.Mock()
        my_wait_time.has_expired.return_value = False

        mock_wait_time.create_wait_timer.return_value = my_wait_time

        # create mock_poet_enclave_module
        mock_poet_enclave_module = mock.Mock()
        mock_poet_enclave_module.return_value = \
            mock_poet_enclave_factory.get_poet_enclave_module.return_value

        # exercise the code under test
        block_publisher = \
            poet_block_publisher.PoetBlockPublisher(
                block_cache=mock_block_cache,
                state_view_factory=mock_state_view_factory,
                batch_publisher=mock_batch_publisher,
                data_dir=self._temp_dir,
                config_dir=self._temp_dir,
                validator_id='validator_deadbeef')

        # check initialize_block() first to set wait_timer
        self.assertTrue(
            block_publisher.initialize_block(
                block_header=mock_block.header))

        # check that block_publisher only claims readiness
        # when the wait_timer has expired
        self.assertFalse(
            block_publisher.check_publish_block(
                block_header=mock_block.header))
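
        # For contrast, a minimal sketch (assumption: under these mocks the
        # unexpired timer is the only thing blocking readiness): once the
        # wait timer reports expiry, the publisher should claim readiness.
        my_wait_time.has_expired.return_value = True
        self.assertTrue(
            block_publisher.check_publish_block(
                block_header=mock_block.header))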