def validate_new_tx(self, tx: BaseTransaction, skip_block_weight_verification: bool = False) -> bool:
    """ Process incoming transaction during initialization.
    These transactions came only from storage.
    """
    assert tx.hash is not None

    if self.state == self.NodeState.INITIALIZING:
        if tx.is_genesis:
            return True
    else:
        if tx.is_genesis:
            raise InvalidNewTransaction('Genesis? {}'.format(tx.hash_hex))

    if tx.timestamp - self.reactor.seconds() > settings.MAX_FUTURE_TIMESTAMP_ALLOWED:
        raise InvalidNewTransaction('Ignoring transaction in the future {} (timestamp={})'.format(
            tx.hash_hex, tx.timestamp))

    # Verify transaction and raise a TxValidationError if tx is not valid.
    tx.verify()

    if tx.is_block:
        tx = cast(Block, tx)
        assert tx.hash is not None  # XXX: it appears that this assert is "lost" after casting, so assert again
        if not skip_block_weight_verification:
            # Validate minimum block difficulty
            block_weight = self.calculate_block_difficulty(tx)
            if tx.weight < block_weight - settings.WEIGHT_TOL:
                raise InvalidNewTransaction(
                    'Invalid new block {}: weight ({}) is smaller than the minimum weight ({})'.format(
                        tx.hash.hex(), tx.weight, block_weight))

        parent_block = tx.get_block_parent()
        tokens_issued_per_block = self.get_tokens_issued_per_block(parent_block.get_metadata().height + 1)
        if tx.sum_outputs != tokens_issued_per_block:
            raise InvalidNewTransaction(
                'Invalid number of issued tokens tag=invalid_issued_tokens'
                ' tx.hash={tx.hash_hex} issued={tx.sum_outputs} allowed={allowed}'.format(
                    tx=tx,
                    allowed=tokens_issued_per_block,
                ))
    else:
        assert tx.hash is not None  # XXX: it appears that this assert is "lost" after casting, so assert again
        # Validate minimum tx difficulty
        min_tx_weight = self.minimum_tx_weight(tx)
        if tx.weight < min_tx_weight - settings.WEIGHT_TOL:
            raise InvalidNewTransaction(
                'Invalid new tx {}: weight ({}) is smaller than the minimum weight ({})'.format(
                    tx.hash_hex, tx.weight, min_tx_weight))

    return True
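# Worked example of the future-timestamp rule used in validate_new_tx above. The numbers and the
# MAX_FUTURE_TIMESTAMP_ALLOWED value are assumptions for illustration only; the real limit comes
# from settings.
MAX_FUTURE_TIMESTAMP_ALLOWED = 3600   # assumed: one hour, in seconds
now = 1_600_000_000                   # assumed local clock value (reactor.seconds())
tx_timestamp = now + 7200             # transaction claims to be two hours in the future
assert tx_timestamp - now > MAX_FUTURE_TIMESTAMP_ALLOWED  # validate_new_tx would raise InvalidNewTransaction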
def verify_weight(self) -> None:
    """Validate minimum tx difficulty."""
    min_tx_weight = daa.minimum_tx_weight(self)
    if self.weight < min_tx_weight - settings.WEIGHT_TOL:
        raise InvalidNewTransaction(
            f'Invalid new tx {self.hash_hex}: weight ({self.weight}) is '
            f'smaller than the minimum weight ({min_tx_weight})')
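# Worked example of the tolerance comparison used in verify_weight. The weights and the
# WEIGHT_TOL value below are made-up assumptions; the real tolerance comes from settings.
WEIGHT_TOL = 1e-10
min_tx_weight = 18.0
assert not (18.0 < min_tx_weight - WEIGHT_TOL)   # weight exactly at the minimum passes
assert 17.9 < min_tx_weight - WEIGHT_TOL         # a clearly lower weight would raise InvalidNewTransaction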
def verify_checkpoint(self, checkpoints: List[Checkpoint]) -> None:
    assert self.storage is not None
    meta = self.get_metadata()
    # at least one child must be checkpoint validated
    for child_tx in map(self.storage.get_transaction, meta.children):
        if child_tx.get_metadata().validation.is_checkpoint():
            return
    raise InvalidNewTransaction(
        f'Invalid new transaction {self.hash_hex}: expected to reach a checkpoint but '
        'none of its children is checkpoint-valid')
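# Sketch of the rule enforced by verify_checkpoint, using stand-in objects instead of real
# storage/metadata (SimpleNamespace is used here purely for illustration).
from types import SimpleNamespace

child_validations = [
    SimpleNamespace(is_checkpoint=lambda: False),
    SimpleNamespace(is_checkpoint=lambda: True),   # one checkpoint-valid child is enough
]
assert any(v.is_checkpoint() for v in child_validations)  # otherwise InvalidNewTransaction is raised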
def render_POST(self, request):
    """ POST request for /create_tx/ that returns an encoded tx, if valid.

    Expects as POST data:
    {
        "inputs": [{"tx_id": <hex encoded>, "index": <int>, "data": <optional base64 encoded>}],
        "outputs": [{"value": <int, 1.00 HTR = 100>, "token_uid": <optional, omit for HTR, hex encoded>,
                     "address" or "script"}]
    }
    """
    request.setHeader(b'content-type', b'application/json; charset=utf-8')
    set_cors(request, 'POST')

    body_content = json_loadb(request.content.read())

    raw_inputs = body_content.get('inputs', [])
    raw_outputs = body_content.get('outputs', [])

    inputs = [TxInput.create_from_dict(i) for i in raw_inputs]
    tokens = []
    outputs = [from_raw_output(i, tokens) for i in raw_outputs]

    timestamp = int(max(self.manager.tx_storage.latest_timestamp, self.manager.reactor.seconds()))
    parents = self.manager.get_new_tx_parents(timestamp)

    # this tx will have to be mined by tx-mining-server or equivalent
    tx = Transaction(
        timestamp=timestamp,
        inputs=inputs,
        outputs=outputs,
        parents=parents,
        storage=self.manager.tx_storage,
    )
    fake_signed_tx = tx.clone()
    for tx_input in fake_signed_tx.inputs:
        # conservative estimate of the input data size to estimate a valid weight
        tx_input.data = b'\0' * 107
    tx.weight = minimum_tx_weight(fake_signed_tx)
    tx.verify_unsigned_skip_pow()

    if tx.is_double_spending():
        raise InvalidNewTransaction('At least one of your inputs has already been spent.')

    hex_data = bytes(tx).hex()
    data = tx.to_json()
    data.pop('hash', None)
    data.pop('nonce', None)

    return json_dumpb({
        'success': True,
        'hex_data': hex_data,
        'data': data,
    })
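# Example of a request body this resource accepts, following the docstring above.
# All ids, uids and addresses are placeholders, not real data.
example_body = {
    'inputs': [
        {'tx_id': '<hex encoded tx id>', 'index': 0},
    ],
    'outputs': [
        {'value': 100, 'address': '<base58 address>'},  # 1.00 HTR
        {'value': 50, 'token_uid': '<hex encoded token uid>', 'address': '<base58 address>'},
    ],
}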
def on_new_tx(self, tx: BaseTransaction, *, conn: Optional[HathorProtocol] = None, quiet: bool = False,
              fails_silently: bool = True, propagate_to_peers: bool = True) -> bool:
    """This method is called when any transaction arrives.

    If `fails_silently` is False, it may raise either InvalidNewTransaction or TxValidationError.

    :return: True if the transaction was accepted
    :rtype: bool
    """
    assert tx.hash is not None
    if self.state != self.NodeState.INITIALIZING:
        if self.tx_storage.transaction_exists(tx.hash):
            if not fails_silently:
                raise InvalidNewTransaction('Transaction already exists {}'.format(tx.hash.hex()))
            self.log.debug('on_new_tx(): Already have transaction {}'.format(tx.hash.hex()))
            return False

    try:
        assert self.validate_new_tx(tx) is True
    except (InvalidNewTransaction, TxValidationError) as e:
        # Discard invalid Transaction/block.
        self.log.debug('Transaction/Block discarded', tx=tx, exc=e)
        if not fails_silently:
            raise
        return False

    if self.state != self.NodeState.INITIALIZING:
        self.tx_storage.save_transaction(tx)
    else:
        tx.reset_metadata()
        self.tx_storage._add_to_cache(tx)

    try:
        tx.update_initial_metadata()
        self.consensus_algorithm.update(tx)
    except Exception:
        pretty_json = json.dumps(tx.to_json(), indent=4)
        self.log.error('An unexpected error occurred when processing {tx.hash_hex}\n'
                       '{pretty_json}', tx=tx, pretty_json=pretty_json)
        self.tx_storage.remove_transaction(tx)
        raise

    if not quiet:
        ts_date = datetime.datetime.fromtimestamp(tx.timestamp)
        if tx.is_block:
            self.log.info('New block found', tag='new_block', tx=tx, ts_date=ts_date,
                          time_from_now=tx.get_time_from_now())
        else:
            self.log.info('New transaction found', tag='new_tx', tx=tx, ts_date=ts_date,
                          time_from_now=tx.get_time_from_now())

    if propagate_to_peers:
        # Propagate to our peers.
        self.connections.send_tx_to_peers(tx)

    if self.wallet:
        # TODO Remove it and use pubsub instead.
        self.wallet.on_new_tx(tx)

    # Publish to pubsub manager the new tx accepted
    self.pubsub.publish(HathorEvents.NETWORK_NEW_TX_ACCEPTED, tx=tx)

    return True
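# Caller sketch (assumed context: `manager` is the HathorManager instance and `tx` a decoded
# transaction). With fails_silently=False the rejection reason surfaces as an exception
# instead of a plain False return.
try:
    accepted = manager.on_new_tx(tx, fails_silently=False)
except (InvalidNewTransaction, TxValidationError) as e:
    accepted = False
    print(f'rejected: {e}')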
def on_new_tx(self, tx: BaseTransaction, *, conn: Optional[HathorProtocol] = None, quiet: bool = False,
              fails_silently: bool = True, propagate_to_peers: bool = True,
              skip_block_weight_verification: bool = False, sync_checkpoints: bool = False,
              partial: bool = False) -> bool:
    """ New method for adding transactions or blocks that steps the validation state machine.

    :param tx: transaction to be added
    :param conn: optionally specify the protocol instance where this tx was received from
    :param quiet: if True will not log when a new tx is accepted
    :param fails_silently: if False will raise an exception when tx cannot be added
    :param propagate_to_peers: if True will relay the tx to other peers if it is accepted
    :param skip_block_weight_verification: if True will not check the tx PoW
    :param sync_checkpoints: if True and also partial=True, will try to validate as a checkpoint and set the
                             proper validation state, this is used for adding txs from the sync-checkpoints phase
    :param partial: if True will accept txs that can't be fully validated yet (because of missing parent/input)
                    but will run a basic validation of what can be validated (PoW and other basic fields)
    """
    assert tx.hash is not None
    if self.tx_storage.transaction_exists(tx.hash):
        if not fails_silently:
            raise InvalidNewTransaction('Transaction already exists {}'.format(tx.hash_hex))
        self.log.warn('on_new_tx(): Transaction already exists', tx=tx.hash_hex)
        return False

    if tx.timestamp - self.reactor.seconds() > settings.MAX_FUTURE_TIMESTAMP_ALLOWED:
        if not fails_silently:
            raise InvalidNewTransaction('Ignoring transaction in the future {} (timestamp={})'.format(
                tx.hash_hex, tx.timestamp))
        self.log.warn('on_new_tx(): Ignoring transaction in the future', tx=tx.hash_hex,
                      future_timestamp=tx.timestamp)
        return False

    tx.storage = self.tx_storage

    try:
        metadata = tx.get_metadata()
    except TransactionDoesNotExist:
        if not fails_silently:
            raise InvalidNewTransaction('missing parent')
        self.log.warn('on_new_tx(): missing parent', tx=tx.hash_hex)
        return False

    if metadata.validation.is_invalid():
        if not fails_silently:
            raise InvalidNewTransaction('previously marked as invalid')
        self.log.warn('on_new_tx(): previously marked as invalid', tx=tx.hash_hex)
        return False

    # if partial=False (the default) we don't even try to partially validate transactions
    if not partial or (metadata.validation.is_fully_connected() or tx.can_validate_full()):
        if isinstance(tx, Transaction) and self.tx_storage.is_tx_needed(tx.hash):
            tx._height_cache = self.tx_storage.needed_index_height(tx.hash)

        if not metadata.validation.is_fully_connected():
            try:
                tx.validate_full(sync_checkpoints=sync_checkpoints)
            except HathorError as e:
                if not fails_silently:
                    raise InvalidNewTransaction('full validation failed') from e
                self.log.warn('on_new_tx(): full validation failed', tx=tx.hash_hex, exc_info=True)
                return False

        # The method below adds the tx as a child of its parents. It must be called right before the
        # save; otherwise the children would be added to the parents' metadata even when the tx is
        # invalid (fails the verifications above), leaving a child that is not in the storage.
        tx.update_initial_metadata()
        self.tx_storage.save_transaction(tx, add_to_indexes=True)

        try:
            self.consensus_algorithm.update(tx)
        except HathorError as e:
            if not fails_silently:
                raise InvalidNewTransaction('consensus update failed') from e
            self.log.warn('on_new_tx(): consensus update failed', tx=tx.hash_hex)
            return False
        else:
            assert tx.validate_full(skip_block_weight_verification=True)
            self.tx_fully_validated(tx)
    elif sync_checkpoints:
        metadata.children = self.tx_storage.children_from_deps(tx.hash)
        try:
            tx.validate_checkpoint(self.checkpoints)
        except HathorError:
            if not fails_silently:
                raise InvalidNewTransaction('checkpoint validation failed')
            self.log.warn('on_new_tx(): checkpoint validation failed', tx=tx.hash_hex, exc_info=True)
            return False
        self.tx_storage.save_transaction(tx)
        self.tx_storage.add_to_deps_index(tx.hash, tx.get_all_dependencies())
        self.tx_storage.add_needed_deps(tx)
    else:
        if isinstance(tx, Block) and not tx.has_basic_block_parent():
            if not fails_silently:
                raise InvalidNewTransaction('block parent needs to be at least basic-valid')
            self.log.warn('on_new_tx(): block parent needs to be at least basic-valid', tx=tx.hash_hex)
            return False
        if not tx.validate_basic():
            if not fails_silently:
                raise InvalidNewTransaction('basic validation failed')
            self.log.warn('on_new_tx(): basic validation failed', tx=tx.hash_hex)
            return False

        # The method below adds the tx as a child of its parents. It must be called right before the
        # save; otherwise the children would be added to the parents' metadata even when the tx is
        # invalid (fails the verifications above), leaving a child that is not in the storage.
        tx.update_initial_metadata()
        self.tx_storage.save_transaction(tx)
        self.tx_storage.add_to_deps_index(tx.hash, tx.get_all_dependencies())
        self.tx_storage.add_needed_deps(tx)

    if tx.is_transaction:
        self.tx_storage.remove_from_needed_index(tx.hash)

    try:
        self.step_validations([tx])
    except (AssertionError, HathorError) as e:
        if not fails_silently:
            raise InvalidNewTransaction('step validations failed') from e
        self.log.warn('on_new_tx(): step validations failed', tx=tx.hash_hex, exc_info=True)
        return False

    if not quiet:
        ts_date = datetime.datetime.fromtimestamp(tx.timestamp)
        now = datetime.datetime.fromtimestamp(self.reactor.seconds())
        if tx.is_block:
            self.log.info('new block', tx=tx, ts_date=ts_date, time_from_now=tx.get_time_from_now(now))
        else:
            self.log.info('new tx', tx=tx, ts_date=ts_date, time_from_now=tx.get_time_from_now(now))

    if propagate_to_peers:
        # Propagate to our peers.
        self.connections.send_tx_to_peers(tx)

    return True
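# Assumed caller sketch for the sync-checkpoints phase described in the docstring above:
# partial=True admits a tx whose parents/inputs may still be missing, and sync_checkpoints=True
# routes it through validate_checkpoint instead of full validation. `manager` and `tx` are
# illustrative names for the HathorManager instance and an incoming transaction.
accepted = manager.on_new_tx(tx, partial=True, sync_checkpoints=True, propagate_to_peers=False)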
def on_new_tx(self, tx: BaseTransaction, *, conn: Optional[HathorProtocol] = None, quiet: bool = False,
              fails_silently: bool = True, propagate_to_peers: bool = True,
              skip_block_weight_verification: bool = False) -> bool:
    """This method is called when any transaction arrives.

    If `fails_silently` is False, it may raise either InvalidNewTransaction or TxValidationError.

    :return: True if the transaction was accepted
    :rtype: bool
    """
    assert tx.hash is not None
    if self.state != self.NodeState.INITIALIZING:
        if self.tx_storage.transaction_exists(tx.hash):
            if not fails_silently:
                raise InvalidNewTransaction('Transaction already exists {}'.format(tx.hash_hex))
            self.log.debug('on_new_tx(): Transaction already exists', tx=tx.hash_hex)
            return False

    if self.state != self.NodeState.INITIALIZING or self._full_verification:
        try:
            assert self.validate_new_tx(
                tx, skip_block_weight_verification=skip_block_weight_verification) is True
        except (InvalidNewTransaction, TxValidationError):
            # Discard invalid Transaction/block.
            self.log.debug('tx/block discarded', tx=tx, exc_info=True)
            if not fails_silently:
                raise
            return False

    if self.state != self.NodeState.INITIALIZING:
        self.tx_storage.save_transaction(tx)
    else:
        self.tx_storage._add_to_cache(tx)
        if self._full_verification:
            tx.reset_metadata()
        else:
            # When doing a fast init, we don't update the consensus, so we must trust the data on the metadata.
            # For transactions, we don't store them on the tips index if they are voided.
            # We have to execute _add_to_cache before because _del_from_cache does not remove from all indexes.
            metadata = tx.get_metadata()
            if not tx.is_block and metadata.voided_by:
                self.tx_storage._del_from_cache(tx)

    if self.state != self.NodeState.INITIALIZING or self._full_verification:
        try:
            tx.update_initial_metadata()
            self.consensus_algorithm.update(tx)
        except Exception:
            self.log.exception('unexpected error when processing tx', tx=tx)
            self.tx_storage.remove_transaction(tx)
            raise

    if not quiet:
        ts_date = datetime.datetime.fromtimestamp(tx.timestamp)
        if tx.is_block:
            self.log.info('new block', tx=tx, ts_date=ts_date, time_from_now=tx.get_time_from_now())
        else:
            self.log.info('new tx', tx=tx, ts_date=ts_date, time_from_now=tx.get_time_from_now())

    if propagate_to_peers:
        # Propagate to our peers.
        self.connections.send_tx_to_peers(tx)

    if self.wallet:
        # TODO Remove it and use pubsub instead.
        self.wallet.on_new_tx(tx)

    # Publish to pubsub manager the new tx accepted
    self.pubsub.publish(HathorEvents.NETWORK_NEW_TX_ACCEPTED, tx=tx)

    return True
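# Assumed caller sketch for loading already-trusted data from local storage: skipping the block
# weight verification avoids recomputing the difficulty for every stored block. `manager` and
# `block` are illustrative names, not taken from the code above.
accepted = manager.on_new_tx(block, quiet=True, skip_block_weight_verification=True)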