def add_tx(self, request) -> None:
    """Deserialize a tx from `request.tx_json`, verify it, then hand it to
    the consensus-owned queue and emit an APM event.
    """
    tx_json = request.tx_json

    versioner = self._channel_service.block_manager.get_blockchain().tx_versioner
    version = versioner.get_version(tx_json)

    serializer = TransactionSerializer.new(version, versioner)
    tx = serializer.from_(tx_json)

    # Raises on an invalid tx; only a verified tx reaches the queue below.
    verifier = TransactionVerifier.new(version, versioner)
    verifier.verify(tx)

    queue_owner = self._channel_service.get_object_has_queue_by_consensus()
    if tx is not None:
        queue_owner.add_tx_obj(tx)
        # NOTE(review): sibling handlers report `tx.hash.hex()` here; confirm
        # that `tx.tx_hash` is the intended field for this variant.
        util.apm_event(
            ChannelProperty().peer_id,
            {
                'event_type': 'AddTx',
                'peer_id': ChannelProperty().peer_id,
                'peer_name': conf.PEER_NAME,
                'channel_name': ChannelProperty().name,
                'data': {'tx_hash': tx.tx_hash},
            })
async def create_icx_tx(self, kwargs: dict):
    """Build, verify, pre-validate and broadcast an icx tx from `kwargs`.

    :return: (message_code.Response.success, tx hash hex) on success,
             (error message code, None) on failure.
    """
    result_code = None
    exception = None
    try:
        versions = TransactionVersions()
        tx_version = versions.get_version(kwargs)
        tx_hash_version = self._channel_service.get_channel_option()["tx_hash_version"]

        serializer = TransactionSerializer.new(tx_version, tx_hash_version)
        tx = serializer.from_(kwargs)

        verifier = TransactionVerifier.new(tx_version, tx_hash_version)
        verifier.verify(tx)

        # Cheap pre-check against current chain state before broadcasting.
        block_manager = self._channel_service.block_manager
        block_manager.pre_validate(tx)

        logging.debug(f"create icx input : {kwargs}")
        self._channel_service.broadcast_scheduler.schedule_job(BroadcastCommand.CREATE_TX, tx)
        return message_code.Response.success, tx.hash.hex()
    except TransactionInvalidError as e:
        result_code = e.message_code
        exception = e
    except BaseException as e:
        result_code = TransactionInvalidError.message_code
        exception = e
    finally:
        # Only the failure path returns from `finally`; the success `return`
        # above is left intact when no exception was recorded.
        if exception:
            logging.warning(f"create_icx_tx: tx restore fail for kwargs({kwargs}), {exception}")
            return result_code, None
def add_tx_list(self, request) -> tuple:
    """Deserialize and verify every tx in `request.tx_list`; enqueue the
    whole batch only when at least one tx survived verification.

    :return: (response code, human-readable result message)
    """
    verified = []
    for tx_item in request.tx_list:
        tx_json = json.loads(tx_item.tx_json)
        tx_version = self.__tx_versioner.get_version(tx_json)

        serializer = TransactionSerializer.new(tx_version, self.__tx_versioner)
        tx = serializer.from_(tx_json)

        verifier = TransactionVerifier.new(tx_version, self.__tx_versioner)
        verifier.verify(tx)

        # Computes (and presumably caches) the tx size up front — TODO confirm
        # the cache assumption against Transaction.size().
        tx.size(self.__tx_versioner)
        verified.append(tx)

    tx_list = verified
    if not tx_list:
        response_code = message_code.Response.fail
        message = "fail tx validate while AddTxList"
    else:
        self.__tx_queue.put(tx_list)
        response_code = message_code.Response.success
        message = f"success ({len(tx_list)})/({len(request.tx_list)})"

    return response_code, message
def add_tx(self, request) -> None:
    """Deserialize a tx from `request.tx_json`, verify it, add it to the
    block manager's queue, and kick the leader-complain timer when empty
    blocks are disallowed.
    """
    tx_json = request.tx_json

    versioner = self._channel_service.block_manager.get_blockchain().tx_versioner
    version = versioner.get_version(tx_json)

    serializer = TransactionSerializer.new(version, versioner)
    tx = serializer.from_(tx_json)

    # Raises on an invalid tx before anything is enqueued.
    verifier = TransactionVerifier.new(version, versioner)
    verifier.verify(tx)

    if tx is not None:
        self._channel_service.block_manager.add_tx_obj(tx)
        # NOTE(review): sibling handlers report `tx.hash.hex()`; confirm
        # `tx.tx_hash` is the intended field for this variant.
        util.apm_event(
            ChannelProperty().peer_id,
            {
                'event_type': 'AddTx',
                'peer_id': ChannelProperty().peer_id,
                'peer_name': conf.PEER_NAME,
                'channel_name': ChannelProperty().name,
                'data': {'tx_hash': tx.tx_hash},
            })

    if not conf.ALLOW_MAKE_EMPTY_BLOCK:
        self._channel_service.start_leader_complain_timer_if_tx_exists()
def _makeup_block(self):
    """Drain verified txs from the queue into a new 0.1a block builder.

    Stops when the queue is empty, the builder reaches
    conf.MAX_TX_SIZE_IN_BLOCK, or the queue yields no further item in the
    requested status. Invalid txs are logged and skipped.

    :return: the populated BlockBuilder (possibly empty).
    """
    block_builder = BlockBuilder.new("0.1a")
    tx_versions = TransactionVersions()

    while self._txQueue:
        if len(block_builder) >= conf.MAX_TX_SIZE_IN_BLOCK:
            logging.debug(
                f"consensus_base total size({len(block_builder)}) "
                f"count({len(block_builder.transactions)}) "
                f"_txQueue size ({len(self._txQueue)})")
            break

        tx: 'Transaction' = self._txQueue.get_item_in_status(
            TransactionStatusInQueue.normal,
            TransactionStatusInQueue.added_to_block)
        if tx is None:
            break

        tx_hash_version = tx_versions.get_hash_generator_version(tx.version)
        tv = TransactionVerifier.new(tx.version, tx_hash_version)
        try:
            tv.verify(tx, self._blockchain)
        except Exception as e:
            # Fix: include the exception in the warning (previously `e` was
            # bound but never logged), matching the sibling implementations.
            logging.warning(f"tx hash invalid.\n"
                            f"tx: {tx}\n"
                            f"exception: {e}")
        else:
            block_builder.transactions[tx.hash] = tx

    return block_builder
def __add_tx_to_block(self, block_builder):
    """Move verified txs from the block manager's queue into `block_builder`
    until the queue runs dry or the accumulated tx size hits the cap.
    """
    tx_queue = self.__block_manager.get_tx_queue()
    tx_versioner = self.__blockchain.tx_versioner
    block_tx_size = 0

    while tx_queue:
        if block_tx_size >= conf.MAX_TX_SIZE_IN_BLOCK:
            logging.debug(f"consensus_base total size({block_builder.size()}) "
                          f"count({len(block_builder.transactions)}) "
                          f"_txQueue size ({len(tx_queue)})")
            break

        tx: 'Transaction' = tx_queue.get_item_in_status(
            TransactionStatusInQueue.normal,
            TransactionStatusInQueue.added_to_block
        )
        if tx is None:
            break

        verifier = TransactionVerifier.new(tx.version, tx_versioner)
        try:
            verifier.verify(tx, self.__blockchain)
        except Exception as e:
            # Skip the bad tx; keep draining the rest of the queue.
            logging.warning(f"tx hash invalid.\n"
                            f"tx: {tx}\n"
                            f"exception: {e}")
            traceback.print_exc()
        else:
            block_builder.transactions[tx.hash] = tx
            block_tx_size += tx.size(tx_versioner)
async def create_icx_tx(self, kwargs: dict):
    """Validate QoS/node preconditions, then build, pre-verify and
    broadcast an icx tx from `kwargs`.

    :return: (message_code.Response.success, tx hash hex) on success,
             (error message code, None) otherwise.
    """
    # Guard: TPS throttle.
    if self.__qos_controller.limit():
        util.logger.debug(f"Out of TPS limit. tx={kwargs}")
        return message_code.Response.fail_out_of_tps_limit, None

    # Guard: node must be initialized and allowed to create txs.
    node_type = self.__properties.get('node_type', None)
    if node_type is None:
        util.logger.warning("Node type has not been set yet.")
        return NodeInitializationError.message_code, None
    elif node_type != conf.NodeType.CommunityNode.value:
        return message_code.Response.fail_no_permission, None

    result_code = None
    exception = None
    tx = None

    try:
        tx_version = self.__tx_versioner.get_version(kwargs)

        serializer = TransactionSerializer.new(tx_version, self.__tx_versioner)
        tx = serializer.from_(kwargs)

        nid = self.__properties.get('nid', None)
        if nid is None:
            util.logger.warning(f"NID has not been set yet.")
            raise NodeInitializationError(tx.hash.hex())

        verifier = TransactionVerifier.new(tx_version, self.__tx_versioner)
        verifier.pre_verify(tx, nid=nid)

        self.__pre_validate(tx)

        logging.debug(f"create icx input : {kwargs}")
        self.__broadcast_scheduler.schedule_job(BroadcastCommand.CREATE_TX, (tx, self.__tx_versioner))
        return message_code.Response.success, tx.hash.hex()
    except MessageCodeError as e:
        result_code = e.message_code
        exception = e
        traceback.print_exc()
    except BaseException as e:
        result_code = TransactionInvalidError.message_code
        exception = e
        traceback.print_exc()
    finally:
        # Failure path only; a successful call already returned above.
        if exception:
            logging.warning(f"create_icx_tx: tx restore fail.\n\n"
                            f"kwargs({kwargs})\n\n"
                            f"tx({tx})\n\n"
                            f"exception({exception})")
            return result_code, None
def test_verify(self, plyvel_db, block_versioner, tx_versioner):
    """Walk the chain backwards from the last block in the db, verifying
    each block's signature, its votes (for block versions >= 0.3), and
    every transaction signature, until the genesis block is reached.
    """
    # given: db instance, block_versioner, tx_versioner
    block_key = plyvel_db.get(b'last_block_key')

    while True:
        # when: fetch and deserialize the block at block_key
        block_dumped = plyvel_db.get(block_key)
        Logger.info(f"block_dump : {block_dumped}")

        block_serialized = json.loads(block_dumped)
        block_height = block_versioner.get_height(block_serialized)
        block_version = block_versioner.get_version(block_height)
        block_serializer = BlockSerializer.new(block_version, tx_versioner)
        block = block_serializer.deserialize(block_serialized)
        Logger.info(f"block_height : {block_height}, block_version : {block_version}")

        # Genesis reached: nothing further to verify.
        if block_height == 0:
            break

        # then: block signature
        block_verifier = BlockVerifier.new(block_version, tx_versioner)
        block_verifier.verify_signature(block)

        # then: leader/prev votes (only present from block version 0.3 on)
        if parse_version(block_version) >= parse_version("0.3"):
            Logger.info(f"leader_votes : {block.body.leader_votes}")
            for leader_vote in block.body.leader_votes:
                if not leader_vote:
                    continue
                leader_vote.verify()

            Logger.info(f"prev_votes : {block.body.prev_votes}")
            for block_vote in block.body.prev_votes:
                if not block_vote:
                    continue
                block_vote.verify()

        # then: every transaction's signature
        for tx in block.body.transactions.values():
            tv = TransactionVerifier.new(tx.version, tx.type(), tx_versioner)
            tv.verify_signature(tx)

        Logger.info(f"prev_hash : {block.header.prev_hash}, {bytes(block.header.prev_hash)}")
        block_key = block.header.prev_hash.hex().encode("utf-8")
def add_tx_list(self, request) -> tuple:
    """Verify each tx in `request.tx_list` individually, enqueueing every
    valid one as it is seen, and report how many were accepted.

    :return: (response code, human-readable result message)
    """
    tx_validate_count = 0
    for tx_item in request.tx_list:
        tx_json = json.loads(tx_item.tx_json)

        versions = TransactionVersions()
        tx_version = versions.get_version(tx_json)
        tx_hash_version = self._channel_service.get_channel_option()["tx_hash_version"]

        serializer = TransactionSerializer.new(tx_version, tx_hash_version)
        tx = serializer.from_(tx_json)

        verifier = TransactionVerifier.new(tx_version, tx_hash_version)
        verifier.verify(tx)

        # util.logger.spam(f"channel_inner_service:add_tx tx({tx.get_data_string()})")

        object_has_queue = self._channel_service.get_object_has_queue_by_consensus()
        if tx is not None:
            object_has_queue.add_tx_obj(tx)
            tx_validate_count += 1
            util.apm_event(
                ChannelProperty().peer_id,
                {
                    'event_type': 'AddTx',
                    'peer_id': ChannelProperty().peer_id,
                    'peer_name': conf.PEER_NAME,
                    'channel_name': ChannelProperty().name,
                    'data': {'tx_hash': tx.hash.hex()},
                })

    if tx_validate_count == 0:
        response_code = message_code.Response.fail
        message = "fail tx validate while AddTxList"
    else:
        response_code = message_code.Response.success
        message = f"success ({tx_validate_count})/({len(request.tx_list)})"

    return response_code, message
async def create_icx_tx(self, kwargs: dict):
    """Build, verify, pre-validate and broadcast an icx tx from `kwargs`.

    :return: (message_code.Response.success, tx hash hex) on success,
             (error message code, None) on failure.
    """
    result_code = None
    exception = None
    tx = None

    try:
        versioner = self._channel_service.block_manager.get_blockchain().tx_versioner
        tx_version = versioner.get_version(kwargs)

        serializer = TransactionSerializer.new(tx_version, versioner)
        tx = serializer.from_(kwargs)

        verifier = TransactionVerifier.new(tx_version, versioner)
        verifier.verify(tx)

        # Cheap chain-state pre-check before the tx is broadcast.
        block_manager = self._channel_service.block_manager
        block_manager.pre_validate(tx)

        logging.debug(f"create icx input : {kwargs}")
        self._channel_service.broadcast_scheduler.schedule_job(
            BroadcastCommand.CREATE_TX, (tx, versioner))
        return message_code.Response.success, tx.hash.hex()
    except TransactionInvalidError as e:
        result_code = e.message_code
        exception = e
        traceback.print_exc()
    except BaseException as e:
        result_code = TransactionInvalidError.message_code
        exception = e
        traceback.print_exc()
    finally:
        # Failure path only; the success `return` above is preserved.
        if exception:
            logging.warning(f"create_icx_tx: tx restore fail.\n\n"
                            f"kwargs({kwargs})\n\n"
                            f"tx({tx})\n\n"
                            f"exception({exception})")
            return result_code, None
def __add_tx_to_block(self, block_builder):
    """Move verified, timestamp-fresh txs from the block manager's queue
    into `block_builder` until the queue runs dry or the accumulated tx
    size reaches the cap.
    """
    tx_queue = self.__block_manager.get_tx_queue()
    tx_versioner = self.__blockchain.tx_versioner
    block_tx_size = 0

    while tx_queue:
        if block_tx_size >= conf.MAX_TX_SIZE_IN_BLOCK:
            logging.debug(f"consensus_base total size({block_builder.size()}) "
                          f"count({len(block_builder.transactions)}) "
                          f"_txQueue size ({len(tx_queue)})")
            break

        tx: 'Transaction' = tx_queue.get_item_in_status(
            TransactionStatusInQueue.normal,
            TransactionStatusInQueue.added_to_block
        )
        if tx is None:
            break

        # Drop txs whose timestamp falls outside the allowed boundary.
        if not util.is_in_time_boundary(tx.timestamp, conf.ALLOW_TIMESTAMP_BOUNDARY_SECOND_IN_BLOCK):
            util.logger.info(f"fail add tx to block by ALLOW_TIMESTAMP_BOUNDARY_SECOND_IN_BLOCK"
                             f"({conf.ALLOW_TIMESTAMP_BOUNDARY_SECOND_IN_BLOCK}) "
                             f"tx({tx.hash}), timestamp({tx.timestamp})")
            continue

        verifier = TransactionVerifier.new(tx.version, tx_versioner)
        try:
            verifier.verify(tx, self.__blockchain)
        except Exception as e:
            # Skip the bad tx; keep draining the rest of the queue.
            logging.warning(f"tx hash invalid.\n"
                            f"tx: {tx}\n"
                            f"exception: {e}")
            traceback.print_exc()
        else:
            block_builder.transactions[tx.hash] = tx
            block_tx_size += tx.size(tx_versioner)
def _makeup_block(self):
    """Create a block builder for the next height and fill it with verified
    txs drained from the queue, stopping at the configured size cap.

    :return: the populated BlockBuilder (possibly empty).
    """
    # self._check_unconfirmed_block()
    next_height = self._blockchain.last_block.header.height + 1
    block_version = self._blockchain.block_versioner.get_version(next_height)
    tx_versioner = self._blockchain.tx_versioner
    block_builder = BlockBuilder.new(block_version, tx_versioner)

    while self._txQueue:
        if block_builder.size() >= conf.MAX_TX_SIZE_IN_BLOCK:
            logging.debug(
                f"consensus_base total size({block_builder.size()}) "
                f"count({len(block_builder.transactions)}) "
                f"_txQueue size ({len(self._txQueue)})")
            break

        tx: 'Transaction' = self._txQueue.get_item_in_status(
            TransactionStatusInQueue.normal,
            TransactionStatusInQueue.added_to_block)
        if tx is None:
            break

        verifier = TransactionVerifier.new(tx.version, tx_versioner)
        try:
            verifier.verify(tx, self._blockchain)
        except Exception as e:
            # Skip the bad tx; keep draining the rest of the queue.
            logging.warning(f"tx hash invalid.\n"
                            f"tx: {tx}\n"
                            f"exception: {e}")
            traceback.print_exc()
        else:
            block_builder.transactions[tx.hash] = tx

    return block_builder