def validate(self):
    """Consume incoming blocks and check whether they are valid.

    Pulls blocks from ``q_blocks_to_validate`` until a ``'stop'`` poison
    pill arrives; each block is forwarded to ``q_validated_block``
    together with the previous block id and its validity verdict.
    """
    # Every process must create its own Bigchain instance so that each
    # one holds its own connection to the database.
    bigchain = Bigchain()

    logger.info('voter waiting for new blocks')
    # signal initialization complete
    self.initialized.set()

    while True:
        block = self.q_blocks_to_validate.get()

        # propagate the poison pill downstream and terminate
        if block == 'stop':
            self.q_validated_block.put('stop')
            return

        logger.info('new_block arrived to voter')

        with self.monitor.timer('validate_block'):
            # FIXME: the following check is done also in `is_valid_block`,
            # but validity can be true even if the block has already
            # a vote.
            if bigchain.has_previous_vote(block):
                continue
            validity = bigchain.is_valid_block(block)
            previous_id = self.v_previous_block_id.value.decode()
            self.q_validated_block.put((block, previous_id, validity))
            self.v_previous_block_id.value = block['id'].encode()
def validate(self):
    """Check the validity of incoming blocks and forward the verdict.

    Runs as a long-lived loop: reads blocks from the input queue, skips
    blocks this node has already voted on, and pushes
    ``(block, previous_block_id, validity)`` tuples to the output queue.
    Stops when the ``'stop'`` poison pill is received.
    """
    # A Bigchain instance cannot be shared between processes; each
    # process opens its own database connection here.
    bigchain = Bigchain()

    logger.info('voter waiting for new blocks')
    # signal initialization complete
    self.initialized.set()

    while True:
        incoming = self.q_blocks_to_validate.get()

        if incoming == 'stop':  # poison pill: pass it on and exit
            self.q_validated_block.put('stop')
            return

        logger.info('new_block arrived to voter')

        with self.monitor.timer('validate_block'):
            # FIXME: the following check is done also in `is_valid_block`,
            # but validity can be true even if the block has already
            # a vote.
            if bigchain.has_previous_vote(incoming):
                continue
            verdict = bigchain.is_valid_block(incoming)
            self.q_validated_block.put(
                (incoming, self.v_previous_block_id.value.decode(), verdict))
            self.v_previous_block_id.value = incoming['id'].encode()
class Vote:
    """This class encapsulates the logic to vote on blocks.

    Note:
        Methods of this class will be executed in different processes.
    """

    def __init__(self):
        """Initialize the Block voter."""
        # Since we cannot share a connection to RethinkDB using
        # multiprocessing, we need to create a temporary instance of
        # BigchainDB that we use only to query RethinkDB.

        # This is the Bigchain instance that will be "shared" (aka: copied)
        # by all the subprocesses.
        self.bigchain = Bigchain()
        self.last_voted_id = Bigchain().get_last_voted_block().id

        # counters: number of transactions of each in-progress block seen
        # so far; blocks_validity_status: running AND of all transaction
        # validities per block.
        self.counters = Counter()
        self.blocks_validity_status = {}

        # Sentinel used to force an invalid vote when the block itself
        # (rather than one of its transactions) fails validation.
        dummy_tx = Transaction.create([self.bigchain.me],
                                      [([self.bigchain.me], 1)]).to_dict()
        self.invalid_dummy_tx = dummy_tx

    def validate_block(self, block_dict):
        """Check a block's structural validity.

        Args:
            block_dict (dict): the block to validate.

        Returns:
            ``None`` if this node has already voted on the block;
            otherwise a tuple of the block id and its transactions (or a
            single-element list with an invalid dummy transaction when
            the block itself is invalid).
        """
        if not self.bigchain.has_previous_vote(block_dict['id']):
            try:
                block = Block.from_db(
                    self.bigchain, block_dict,
                    from_dict_kwargs={'tx_construct': FastTransaction})
            except exceptions.InvalidHash:
                # XXX: if a block is invalid we should skip the `validate_tx`
                # step, but since we are in a pipeline we cannot just jump to
                # another function. Hackish solution: generate an invalid
                # transaction and propagate it to the next steps of the
                # pipeline.
                return block_dict['id'], [self.invalid_dummy_tx]
            try:
                block._validate_block(self.bigchain)
            except exceptions.ValidationError:
                # XXX: same hack as above — propagate an invalid dummy
                # transaction so the pipeline casts an invalid vote.
                return block.id, [self.invalid_dummy_tx]
            return block.id, block_dict['block']['transactions']

    def ungroup(self, block_id, transactions):
        """Given a block, ungroup the transactions in it.

        Args:
            block_id (str): the id of the block in progress.
            transactions (list(dict)): transactions of the block in
                progress.

        Returns:
            ``None`` if the block has been already voted, an iterator that
            yields a transaction, block id, and the total number of
            transactions contained in the block otherwise.
        """
        num_tx = len(transactions)
        for tx in transactions:
            yield tx, block_id, num_tx

    def validate_tx(self, tx_dict, block_id, num_tx):
        """Validate a transaction. Transaction must also not be in any
        VALID block.

        Args:
            tx_dict (dict): the transaction to validate
            block_id (str): the id of block containing the transaction
            num_tx (int): the total number of transactions to process

        Returns:
            Three values are returned, the validity of the transaction,
            ``block_id``, ``num_tx``.
        """
        try:
            tx = Transaction.from_dict(tx_dict)
            new = self.bigchain.is_new_transaction(tx.id,
                                                   exclude_block_id=block_id)
            if not new:
                # BUGFIX: interpolate the id into the message explicitly;
                # exception constructors do not %-format their arguments,
                # so the previous form logged an unformatted tuple.
                raise exceptions.ValidationError(
                    'Tx already exists, %s' % tx.id)
            tx.validate(self.bigchain)
            valid = True
        except exceptions.ValidationError as e:
            valid = False
            logger.warning('Invalid tx: %s', e)
        return valid, block_id, num_tx

    def vote(self, tx_validity, block_id, num_tx):
        """Collect the validity of transactions and cast a vote when ready.

        Args:
            tx_validity (bool): the validity of the transaction
            block_id (str): the id of block containing the transaction
            num_tx (int): the total number of transactions to process

        Returns:
            None, or a tuple ``(vote, num_tx)`` once all of the block's
            transactions have been processed.
        """
        self.counters[block_id] += 1
        self.blocks_validity_status[block_id] = (
            tx_validity and self.blocks_validity_status.get(block_id, True))

        if self.counters[block_id] == num_tx:
            vote = self.bigchain.vote(block_id,
                                      self.last_voted_id,
                                      self.blocks_validity_status[block_id])
            self.last_voted_id = block_id
            # Drop per-block bookkeeping once the decision is cast.
            del self.counters[block_id]
            del self.blocks_validity_status[block_id]
            return vote, num_tx

    def write_vote(self, vote, num_tx):
        """Write vote to the database.

        Args:
            vote: the vote to write.
            num_tx (int): number of transactions in the voted block, used
                to update the throughput metric.

        Returns:
            The vote that was written.
        """
        validity = 'valid' if vote['vote']['is_block_valid'] else 'invalid'
        logger.info("Voting '%s' for block %s", validity,
                    vote['vote']['voting_for_block'])
        self.bigchain.write_vote(vote)
        self.bigchain.statsd.incr('pipelines.vote.throughput', num_tx)
        return vote
class Vote:
    """Encapsulates the logic to vote on blocks.

    Note:
        Methods of this class will be executed in different processes.
    """

    def __init__(self):
        """Initialize the Block voter."""
        # A RethinkDB connection cannot be shared across processes, so a
        # throwaway Bigchain instance is used for the startup query below.
        self.consensus = BaseConsensusRules

        # This Bigchain instance is "shared" (i.e. copied) into every
        # subprocess.
        self.bigchain = Bigchain()
        self.last_voted_id = Bigchain().get_last_voted_block().id

        self.counters = Counter()
        self.validity = {}

        self.invalid_dummy_tx = Transaction.create([self.bigchain.me],
                                                   [self.bigchain.me])

    def validate_block(self, block):
        """Check a block's validity, skipping already-voted blocks.

        Returns ``None`` for already-voted blocks; otherwise a tuple of
        the block id and its transactions (an invalid dummy transaction
        stands in when the block itself fails validation).
        """
        voters = block['block']['voters']
        if self.bigchain.has_previous_vote(block['id'], voters):
            return None
        try:
            block = Block.from_dict(block)
        except (exceptions.InvalidHash, exceptions.InvalidSignature):
            # XXX: if a block is invalid we should skip the `validate_tx`
            # step, but since we are in a pipeline we cannot just jump to
            # another function. Hackish solution: generate an invalid
            # transaction and propagate it to the next steps of the
            # pipeline.
            return block['id'], [self.invalid_dummy_tx]
        try:
            self.consensus.validate_block(self.bigchain, block)
        except (exceptions.InvalidHash,
                exceptions.OperationError,
                exceptions.InvalidSignature):
            # XXX: same pipeline hack — an invalid dummy transaction
            # forces an invalid vote downstream.
            return block.id, [self.invalid_dummy_tx]
        return block.id, block.transactions

    def ungroup(self, block_id, transactions):
        """Given a block, ungroup the transactions in it.

        Args:
            block_id (str): the id of the block in progress.
            transactions (list(Transaction)): transactions of the block
                in progress.

        Returns:
            ``None`` if the block has been already voted, an iterator that
            yields a transaction, block id, and the total number of
            transactions contained in the block otherwise.
        """
        total = len(transactions)
        for transaction in transactions:
            yield transaction, block_id, total

    def validate_tx(self, tx, block_id, num_tx):
        """Validate a transaction.

        Args:
            tx (dict): the transaction to validate
            block_id (str): the id of block containing the transaction
            num_tx (int): the total number of transactions to process

        Returns:
            Three values are returned, the validity of the transaction,
            ``block_id``, ``num_tx``.
        """
        is_valid = bool(self.bigchain.is_valid_transaction(tx))
        return is_valid, block_id, num_tx

    def vote(self, tx_validity, block_id, num_tx):
        """Collect the validity of transactions and cast a vote when ready.

        Args:
            tx_validity (bool): the validity of the transaction
            block_id (str): the id of block containing the transaction
            num_tx (int): the total number of transactions to process

        Returns:
            None, or a vote if a decision has been reached.
        """
        self.counters[block_id] += 1
        self.validity[block_id] = (self.validity.get(block_id, True)
                                   and tx_validity)

        if self.counters[block_id] != num_tx:
            return None

        # All transactions of the block were seen: cast the vote and
        # drop the per-block bookkeeping.
        vote = self.bigchain.vote(block_id,
                                  self.last_voted_id,
                                  self.validity[block_id])
        self.last_voted_id = block_id
        del self.counters[block_id]
        del self.validity[block_id]
        return vote

    def write_vote(self, vote):
        """Write vote to the database.

        Args:
            vote: the vote to write.

        Returns:
            The vote that was written.
        """
        self.bigchain.write_vote(vote)
        return vote
class Vote:
    """Voting logic for blocks.

    Note:
        Methods of this class will be executed in different processes.
    """

    def __init__(self):
        """Initialize the Block voter."""
        # We cannot share a connection to RethinkDB across processes, so
        # a temporary Bigchain instance is created just for the initial
        # last-voted-block query.
        self.consensus = BaseConsensusRules

        # The Bigchain instance below is "shared" (aka: copied) by all
        # the subprocesses.
        self.bigchain = Bigchain()
        self.last_voted_id = Bigchain().get_last_voted_block().id

        self.counters = Counter()
        self.validity = {}

        self.invalid_dummy_tx = Transaction.create([self.bigchain.me],
                                                   [([self.bigchain.me], 1)])

    def validate_block(self, block):
        """Check a block's validity unless this node already voted on it.

        Returns ``None`` when a previous vote exists; otherwise the block
        id paired with its transactions, substituting an invalid dummy
        transaction when the block itself is invalid.
        """
        if self.bigchain.has_previous_vote(block['id'],
                                           block['block']['voters']):
            return None

        try:
            block = Block.from_dict(block)
        except (exceptions.InvalidHash, exceptions.InvalidSignature):
            # XXX: if a block is invalid we should skip the `validate_tx`
            # step, but since we are in a pipeline we cannot just jump to
            # another function. Hackish solution: generate an invalid
            # transaction and propagate it to the next steps of the
            # pipeline.
            return block['id'], [self.invalid_dummy_tx]

        try:
            self.consensus.validate_block(self.bigchain, block)
        except (exceptions.InvalidHash,
                exceptions.OperationError,
                exceptions.InvalidSignature):
            # XXX: same pipeline hack as above.
            return block.id, [self.invalid_dummy_tx]

        return block.id, block.transactions

    def ungroup(self, block_id, transactions):
        """Given a block, ungroup the transactions in it.

        Args:
            block_id (str): the id of the block in progress.
            transactions (list(Transaction)): transactions of the block
                in progress.

        Returns:
            ``None`` if the block has been already voted, an iterator that
            yields a transaction, block id, and the total number of
            transactions contained in the block otherwise.
        """
        count = len(transactions)
        yield from ((tx, block_id, count) for tx in transactions)

    def validate_tx(self, tx, block_id, num_tx):
        """Validate a transaction.

        Args:
            tx (dict): the transaction to validate
            block_id (str): the id of block containing the transaction
            num_tx (int): the total number of transactions to process

        Returns:
            Three values are returned, the validity of the transaction,
            ``block_id``, ``num_tx``.
        """
        return bool(self.bigchain.is_valid_transaction(tx)), block_id, num_tx

    def vote(self, tx_validity, block_id, num_tx):
        """Collect the validity of transactions and cast a vote when ready.

        Args:
            tx_validity (bool): the validity of the transaction
            block_id (str): the id of block containing the transaction
            num_tx (int): the total number of transactions to process

        Returns:
            None, or a vote if a decision has been reached.
        """
        self.counters[block_id] += 1
        previous = self.validity.get(block_id, True)
        self.validity[block_id] = tx_validity and previous

        if self.counters[block_id] != num_tx:
            return None

        # The whole block has been processed: cast the vote and clear
        # the per-block state.
        cast = self.bigchain.vote(block_id,
                                  self.last_voted_id,
                                  self.validity[block_id])
        self.last_voted_id = block_id
        del self.counters[block_id]
        del self.validity[block_id]
        return cast

    def write_vote(self, vote):
        """Write vote to the database.

        Args:
            vote: the vote to write.

        Returns:
            The vote that was written.
        """
        self.bigchain.write_vote(vote)
        return vote
class Vote:
    """This class encapsulates the logic to vote on blocks.

    Note:
        Methods of this class will be executed in different processes.
    """

    def __init__(self):
        """Initialize the Block voter."""
        # Since we cannot share a connection to RethinkDB using
        # multiprocessing, we need to create a temporary instance of
        # BigchainDB that we use only to query RethinkDB.

        # This is the Bigchain instance that will be "shared" (aka: copied)
        # by all the subprocesses.
        self.bigchain = Bigchain()
        self.last_voted_id = Bigchain().get_last_voted_block().id

        # counters: transactions processed so far per block;
        # blocks_validity_status: running AND of transaction validities.
        self.counters = Counter()
        self.blocks_validity_status = {}

        # Sentinel transaction used to force an invalid vote when the
        # block itself fails validation.
        dummy_tx = Transaction.create([self.bigchain.me],
                                      [([self.bigchain.me], 1)]).to_dict()
        self.invalid_dummy_tx = dummy_tx

    def validate_block(self, block_dict):
        """Check a block's structural validity.

        Args:
            block_dict (dict): the block to validate.

        Returns:
            ``None`` if this node has already voted on the block;
            otherwise a tuple of the block id and its transactions (or a
            single-element list with an invalid dummy transaction when
            the block itself is invalid).
        """
        if not self.bigchain.has_previous_vote(block_dict['id']):
            try:
                block = Block.from_db(
                    self.bigchain, block_dict,
                    from_dict_kwargs={'tx_construct': FastTransaction})
            except exceptions.InvalidHash:
                # XXX: if a block is invalid we should skip the `validate_tx`
                # step, but since we are in a pipeline we cannot just jump to
                # another function. Hackish solution: generate an invalid
                # transaction and propagate it to the next steps of the
                # pipeline.
                return block_dict['id'], [self.invalid_dummy_tx]
            try:
                block._validate_block(self.bigchain)
            except exceptions.ValidationError:
                # XXX: same hack as above — propagate an invalid dummy
                # transaction so the pipeline casts an invalid vote.
                return block.id, [self.invalid_dummy_tx]
            return block.id, block_dict['block']['transactions']

    def ungroup(self, block_id, transactions):
        """Given a block, ungroup the transactions in it.

        Args:
            block_id (str): the id of the block in progress.
            transactions (list(dict)): transactions of the block in
                progress.

        Returns:
            ``None`` if the block has been already voted, an iterator that
            yields a transaction, block id, and the total number of
            transactions contained in the block otherwise.
        """
        num_tx = len(transactions)
        for tx in transactions:
            yield tx, block_id, num_tx

    def validate_tx(self, tx_dict, block_id, num_tx):
        """Validate a transaction. Transaction must also not be in any
        VALID block.

        Args:
            tx_dict (dict): the transaction to validate
            block_id (str): the id of block containing the transaction
            num_tx (int): the total number of transactions to process

        Returns:
            Three values are returned, the validity of the transaction,
            ``block_id``, ``num_tx``.
        """
        try:
            tx = Transaction.from_dict(tx_dict)
            new = self.bigchain.is_new_transaction(tx.id,
                                                   exclude_block_id=block_id)
            if not new:
                # BUGFIX: interpolate the id into the message explicitly;
                # exception constructors do not %-format their arguments,
                # so the previous form logged an unformatted tuple.
                raise exceptions.ValidationError(
                    'Tx already exists, %s' % tx.id)
            tx.validate(self.bigchain)
            valid = True
        except exceptions.ValidationError as e:
            valid = False
            logger.warning('Invalid tx: %s', e)
        return valid, block_id, num_tx

    def vote(self, tx_validity, block_id, num_tx):
        """Collect the validity of transactions and cast a vote when ready.

        Args:
            tx_validity (bool): the validity of the transaction
            block_id (str): the id of block containing the transaction
            num_tx (int): the total number of transactions to process

        Returns:
            None, or a tuple ``(vote, num_tx)`` once all of the block's
            transactions have been processed.
        """
        self.counters[block_id] += 1
        self.blocks_validity_status[block_id] = (
            tx_validity and self.blocks_validity_status.get(block_id, True))

        if self.counters[block_id] == num_tx:
            vote = self.bigchain.vote(block_id,
                                      self.last_voted_id,
                                      self.blocks_validity_status[block_id])
            self.last_voted_id = block_id
            # Drop per-block bookkeeping once the decision is cast.
            del self.counters[block_id]
            del self.blocks_validity_status[block_id]
            return vote, num_tx

    def write_vote(self, vote, num_tx):
        """Write vote to the database.

        Args:
            vote: the vote to write.
            num_tx (int): number of transactions in the voted block, used
                to update the throughput metric.

        Returns:
            The vote that was written.
        """
        validity = 'valid' if vote['vote']['is_block_valid'] else 'invalid'
        logger.info("Voting '%s' for block %s", validity,
                    vote['vote']['voting_for_block'])
        self.bigchain.write_vote(vote)
        self.bigchain.statsd.incr('pipelines.vote.throughput', num_tx)
        return vote