def block_to_bx_block(
    self,
    block_msg: OntConsensusMessage,
    tx_service: TransactionService,
    enable_block_compression: bool = True,
    min_tx_age_seconds: float = 0,
) -> Tuple[memoryview, BlockInfo]:
    """
    Compress an ONT consensus block message into the bx broadcast format
    using the native extension compression task pool.

    :param block_msg: consensus block received from the blockchain node
    :param tx_service: transaction service; must actually be an
        ExtensionTransactionService (cast below)
    :param enable_block_compression: accepted for compatibility with callers
        (msg_consensus passes it positionally). NOTE(review): not forwarded to
        the native task here -- confirm whether this version of tsk.init
        accepts it before wiring it through.
    :param min_tx_age_seconds: accepted for caller compatibility; see note above
    :return: tuple of (compressed block bytes, compression statistics)
    :raises MessageConversionError: when the native compression task fails
    """
    compress_start_datetime = datetime.utcnow()
    compress_start_timestamp = time.time()
    extension_tx_service = typing.cast(ExtensionTransactionService, tx_service)

    # Track the largest block seen so pooled task buffers stay big enough.
    self._default_block_size = max(self._default_block_size, len(block_msg.buf))

    tsk = self.compression_tasks.borrow_task()
    tsk.init(tpe.InputBytes(block_msg.buf), extension_tx_service.proxy)
    try:
        try:
            task_pool_proxy.run_task(tsk)
        except tpe.AggregatedException as e:
            # NOTE(review): the error factory is named "btc" but is used for
            # ONT throughout this module; kept for consistency.
            raise message_conversion_error.btc_block_compression_error(
                block_msg.block_hash(), e
            )

        block = memoryview(tsk.bx_block())
        compressed_size = len(block)
        original_size = len(block_msg.rawbytes())
        block_hash = OntObjectHash(
            binary=convert.hex_to_bytes(tsk.block_hash().hex_string())
        )
        block_info = BlockInfo(
            block_hash,
            tsk.short_ids(),
            compress_start_datetime,
            datetime.utcnow(),
            (time.time() - compress_start_timestamp) * 1000,
            tsk.txn_count(),
            tsk.compressed_block_hash().hex_string(),
            tsk.prev_block_hash().hex_string(),
            original_size,
            compressed_size,
            # compression rate as a percentage saved
            100 - float(compressed_size) / original_size * 100,
        )
        return block, block_info
    finally:
        # Always hand the task back to the pool; the original only did so on
        # success or AggregatedException, leaking tasks on any other error.
        self.compression_tasks.return_task(tsk)
def get_sample_block():
    """
    Load the hex-encoded sample ONT consensus block shipped with the tests
    and parse it into a message object.

    :return: OntConsensusMessage built from the on-disk hex dump
    """
    tests_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    sample_path = os.path.join(tests_root, "ont_consensus_sample_block.txt")
    with open(sample_path) as sample_file:
        block_hex = sample_file.read().strip("\n")
    raw_block = bytearray(convert.hex_to_bytes(block_hex))
    return OntConsensusMessage(buf=raw_block)
def test_peek_message_success_all_types(self):
    """Header preview succeeds for every ONT message type with the expected
    message type constant and payload length."""
    expected_previews = [
        (self.VERSION_ONT_MESSAGE, VersionOntMessage.MESSAGE_TYPE, 83),
        (VerAckOntMessage(self.MAGIC, True), VerAckOntMessage.MESSAGE_TYPE, 1),
        (PingOntMessage(self.MAGIC), PingOntMessage.MESSAGE_TYPE, 8),
        (PongOntMessage(self.MAGIC, 123), PongOntMessage.MESSAGE_TYPE, 8),
        (GetAddrOntMessage(self.MAGIC), GetAddrOntMessage.MESSAGE_TYPE, 0),
        (
            AddrOntMessage(
                self.MAGIC,
                [(int(time.time()), 123, "127.0.0.1", 20300, 20200, 1234)]
            ),
            AddrOntMessage.MESSAGE_TYPE,
            52,
        ),
        (
            OntConsensusMessage(self.MAGIC, self.VERSION, bytes(20)),
            OntConsensusMessage.MESSAGE_TYPE,
            24,
        ),
        (
            InvOntMessage(self.MAGIC, InventoryOntType.MSG_TX,
                          [self.HASH, self.HASH]),
            InvOntMessage.MESSAGE_TYPE,
            69,
        ),
        (
            GetDataOntMessage(self.MAGIC, 1, self.HASH),
            GetDataOntMessage.MESSAGE_TYPE,
            33,
        ),
        (
            GetHeadersOntMessage(self.MAGIC, 1, self.HASH, self.HASH),
            GetHeadersOntMessage.MESSAGE_TYPE,
            65,
        ),
        (
            GetBlocksOntMessage(self.MAGIC, 1, self.HASH, self.HASH),
            GetBlocksOntMessage.MESSAGE_TYPE,
            65,
        ),
        (
            TxOntMessage(self.MAGIC, self.VERSION, bytes(20)),
            TxOntMessage.MESSAGE_TYPE,
            21,
        ),
        (
            BlockOntMessage(self.MAGIC, self.VERSION, self.HASH, self.HASH,
                            self.HASH, 0, 0, 0, bytes(10), bytes(20),
                            [bytes(33)] * 5, [bytes(2)] * 3, [bytes(32)] * 5,
                            self.HASH),
            BlockOntMessage.MESSAGE_TYPE,
            524,
        ),
        (
            HeadersOntMessage(self.MAGIC, [bytes(1)] * 2),
            HeadersOntMessage.MESSAGE_TYPE,
            6,
        ),
        (
            NotFoundOntMessage(self.MAGIC, self.HASH),
            NotFoundOntMessage.MESSAGE_TYPE,
            32,
        ),
    ]
    for message, message_type, payload_length in expected_previews:
        self.get_message_preview_successfully(message, message_type,
                                              payload_length)
def build_ont_block(block_pieces: Deque[Union[bytearray, memoryview]]) -> OntConsensusMessage:
    """
    Reassemble an ONT consensus message from its decomposed pieces.

    Piece layout: [0] header, [1] consensus data type byte, [2] consensus
    data length (little-endian u32), [3 .. n-2] consensus payload fragments,
    [n-1] trailer. The payload is base64-encoded and wrapped in a JSON
    ConsensusMsgPayload before being placed between header and trailer.

    :param block_pieces: ordered deque of block fragments
    :return: parsed OntConsensusMessage built from the reassembled bytes
    """
    data_type, = struct.unpack_from("<B", block_pieces[1], 0)
    data_len, = struct.unpack_from("<L", block_pieces[2], 0)

    # Concatenate every payload fragment (pieces 3 through n-2).
    payload = bytearray()
    for index in range(3, len(block_pieces) - 1):
        payload += block_pieces[index]

    encoded_payload = base64.b64encode(bytes(payload)).decode(
        constants.DEFAULT_TEXT_ENCODING)
    wrapped = ConsensusMsgPayload(data_type, data_len, encoded_payload)
    # Compact JSON (no whitespace) to match the wire format exactly.
    payload_json = json.dumps(wrapped, cls=EnhancedJSONEncoder,
                              separators=(",", ":"))

    reassembled = bytearray()
    reassembled += block_pieces[0]
    reassembled += payload_json.encode(constants.DEFAULT_TEXT_ENCODING)
    reassembled += block_pieces[-1]
    return OntConsensusMessage(buf=reassembled)
def bx_block_to_block(
    self, bx_block_msg: memoryview, tx_service: ExtensionTransactionService
) -> BlockDecompressionResult:
    """
    Decompress a bx broadcast block back into an ONT consensus message using
    the native extension decompression task pool.

    If some short ids / transaction hashes are unknown, the block message in
    the result is None and the unknown sids/hashes are reported so block
    recovery can run.

    :param bx_block_msg: compressed block bytes
    :param tx_service: extension transaction service backing sid lookups
    :return: BlockDecompressionResult with the (possibly None) block message,
        block info, and any unknown sids / tx hashes
    :raises MessageConversionError: when the native decompression task fails
    """
    decompress_start_datetime = datetime.utcnow()
    decompress_start_timestamp = time.time()

    tsk = self.decompression_tasks.borrow_task()
    tsk.init(tpe.InputBytes(bx_block_msg), tx_service.proxy)
    try:
        try:
            task_pool_proxy.run_task(tsk)
        except tpe.AggregatedException as e:
            # Parse just the header so the error can carry the block hash.
            header_info = ont_normal_consensus_message_converter.parse_bx_block_header(
                bx_block_msg, deque()
            )
            # NOTE(review): error factory is named "btc" but used for ONT
            # throughout this module; kept for consistency.
            raise message_conversion_error.btc_block_decompression_error(
                header_info.block_hash, e
            )

        total_tx_count = tsk.txn_count()
        unknown_tx_hashes = [
            Sha256Hash(bytearray(unknown_tx_hash.binary()))
            for unknown_tx_hash in tsk.unknown_tx_hashes()
        ]
        unknown_tx_sids = tsk.unknown_tx_sids()
        block_hash = OntObjectHash(
            binary=convert.hex_to_bytes(tsk.block_hash().hex_string())
        )

        if tsk.success():
            ont_block_msg = OntConsensusMessage(
                buf=memoryview(tsk.block_message()))
            logger.debug(
                "Successfully parsed block broadcast message. {} transactions "
                "in block {}", total_tx_count, block_hash)
        else:
            # Not all transactions could be resolved; caller must recover.
            ont_block_msg = None
            logger.debug(
                "Block recovery needed for {}. Missing {} sids, {} tx hashes. "
                "Total txs in block: {}",
                block_hash, len(unknown_tx_sids), len(unknown_tx_hashes),
                total_tx_count)

        block_info = abstract_ont_message_converter.get_block_info(
            bx_block_msg,
            block_hash,
            tsk.short_ids(),
            decompress_start_datetime,
            decompress_start_timestamp,
            total_tx_count,
            ont_block_msg)

        return BlockDecompressionResult(
            ont_block_msg, block_info, unknown_tx_sids, unknown_tx_hashes)
    finally:
        # Always hand the task back to the pool; the original only did so on
        # success or AggregatedException, leaking tasks on any other error.
        self.decompression_tasks.return_task(tsk)
def test_parse_message_success_all_types(self):
    """Full parse succeeds for every ONT message type, yielding an instance
    of the expected message class."""
    expected_messages = [
        (self.VERSION_ONT_MESSAGE, VersionOntMessage),
        (VerAckOntMessage(self.MAGIC, False), VerAckOntMessage),
        (PingOntMessage(self.MAGIC), PingOntMessage),
        (PongOntMessage(self.MAGIC, 123), PongOntMessage),
        (GetAddrOntMessage(self.MAGIC), GetAddrOntMessage),
        (
            AddrOntMessage(
                self.MAGIC,
                [(int(time.time()), 123, "127.0.0.1", 20300, 20200, 1234)]
            ),
            AddrOntMessage,
        ),
        (
            OntConsensusMessage(self.MAGIC, self.VERSION, bytes(20)),
            OntConsensusMessage,
        ),
        (
            InvOntMessage(self.MAGIC, InventoryOntType.MSG_TX,
                          [self.HASH, self.HASH]),
            InvOntMessage,
        ),
        (GetDataOntMessage(self.MAGIC, 1, self.HASH), GetDataOntMessage),
        (
            GetHeadersOntMessage(self.MAGIC, 1, self.HASH, self.HASH),
            GetHeadersOntMessage,
        ),
        (
            GetBlocksOntMessage(self.MAGIC, 1, self.HASH, self.HASH),
            GetBlocksOntMessage,
        ),
        (TxOntMessage(self.MAGIC, self.VERSION, bytes(20)), TxOntMessage),
        (
            BlockOntMessage(self.MAGIC, self.VERSION, self.HASH, self.HASH,
                            self.HASH, 0, 0, 0, bytes(10), bytes(20),
                            [bytes(33)] * 5, [bytes(2)] * 3, [bytes(32)] * 5,
                            self.HASH),
            BlockOntMessage,
        ),
        (HeadersOntMessage(self.MAGIC, [bytes(1)] * 2), HeadersOntMessage),
        (NotFoundOntMessage(self.MAGIC, self.HASH), NotFoundOntMessage),
    ]
    for message, message_cls in expected_messages:
        self.create_message_successfully(message, message_cls)
def msg_consensus(self, msg: OntConsensusMessage):
    """
    Handle a consensus message received from the local ONT blockchain node:
    filter to block-proposal messages, dedupe, record stats, send block holds,
    compress the block, and hand it off for broadcast.

    :param msg: consensus message from the connected blockchain node
    """
    # Only gateways running in consensus mode process these messages.
    if not self.node.opts.is_consensus:
        return
    # Only block-proposal consensus messages carry a block to relay.
    if msg.consensus_data_type() != ont_constants.BLOCK_PROPOSAL_CONSENSUS_MESSAGE_TYPE:
        return
    block_hash = msg.block_hash()
    node = self.connection.node
    if not node.should_process_block_hash(block_hash):
        return

    node.block_cleanup_service.on_new_block_received(block_hash, msg.prev_block_hash())
    block_stats.add_block_event_by_block_hash(block_hash,
                                              BlockStatEventType.BLOCK_RECEIVED_FROM_BLOCKCHAIN_NODE,
                                              network_num=self.connection.network_num,
                                              broadcast_type=BroadcastMessageType.CONSENSUS,
                                              more_info="Protocol: {}, Network: {}".format(
                                                  node.opts.blockchain_protocol,
                                                  node.opts.blockchain_network
                                              ),
                                              msg_size=len(msg.rawbytes())
                                              )

    # Drop blocks the gateway has already seen (e.g. received via relays).
    if block_hash in self.connection.node.blocks_seen.contents:
        self.node.on_block_seen_by_blockchain_node(block_hash, self.connection)
        block_stats.add_block_event_by_block_hash(block_hash,
                                                  BlockStatEventType.BLOCK_RECEIVED_FROM_BLOCKCHAIN_NODE_IGNORE_SEEN,
                                                  network_num=self.connection.network_num,
                                                  broadcast_type=BroadcastMessageType.CONSENSUS)
        self.connection.log_info(
            "Discarding duplicate consensus block {} from local blockchain node.",
            block_hash
        )
        return

    node.track_block_from_node_handling_started(block_hash)
    self.connection.log_info(
        "Processing consensus block {} from local blockchain node.",
        block_hash
    )

    # Broadcast BlockHoldingMessage through relays and gateways
    conns = self.node.broadcast(BlockHoldingMessage(block_hash, self.node.network_num),
                                broadcasting_conn=self.connection,
                                prepend_to_queue=True,
                                connection_types=[ConnectionType.RELAY_BLOCK,
                                                  ConnectionType.GATEWAY])
    if len(conns) > 0:
        block_stats.add_block_event_by_block_hash(block_hash,
                                                  BlockStatEventType.BLOCK_HOLD_SENT_BY_GATEWAY_TO_PEERS,
                                                  network_num=self.node.network_num,
                                                  broadcast_type=BroadcastMessageType.CONSENSUS,
                                                  peers=conns
                                                  )

    # Compress the block; on conversion failure, log and record the event but
    # do not propagate -- the node keeps running.
    try:
        bx_block, block_info = self.node.consensus_message_converter.block_to_bx_block(
            msg, self.node.get_tx_service(), self.node.opts.enable_block_compression,
            self.node.network.min_tx_age_seconds
        )
    except MessageConversionError as e:
        block_stats.add_block_event_by_block_hash(
            e.msg_hash,
            BlockStatEventType.BLOCK_CONVERSION_FAILED,
            network_num=self.connection.network_num,
            broadcast_type=BroadcastMessageType.CONSENSUS,
            conversion_type=e.conversion_type.value
        )
        self.connection.log_error(log_messages.BLOCK_COMPRESSION_FAIL_ONT_CONSENSUS,
                                  e.msg_hash, e)
        return

    if block_info.ignored_short_ids:
        self.connection.log_debug(
            "Ignoring {} new SIDs for {}: {}",
            len(block_info.ignored_short_ids), block_info.block_hash,
            block_info.ignored_short_ids
        )

    block_stats.add_block_event_by_block_hash(block_hash,
                                              BlockStatEventType.BLOCK_COMPRESSED,
                                              start_date_time=block_info.start_datetime,
                                              end_date_time=block_info.end_datetime,
                                              network_num=self.connection.network_num,
                                              broadcast_type=BroadcastMessageType.CONSENSUS,
                                              prev_block_hash=block_info.prev_block_hash,
                                              original_size=block_info.original_size,
                                              txs_count=block_info.txn_count,
                                              blockchain_network=self.node.opts.blockchain_network,
                                              blockchain_protocol=self.node.opts.blockchain_protocol,
                                              matching_block_hash=block_info.compressed_block_hash,
                                              matching_block_type=StatBlockType.COMPRESSED.value,
                                              more_info="Consensus compression: {}->{} bytes, {}, {}; "
                                                        "Tx count: {}".format(
                                                  block_info.original_size,
                                                  block_info.compressed_size,
                                                  stats_format.percentage(block_info.compression_rate),
                                                  stats_format.duration(block_info.duration_ms),
                                                  block_info.txn_count
                                              )
                                              )

    # NOTE(review): calls a private method of block_processing_service;
    # presumably intentional within this package -- verify.
    self.node.block_processing_service._process_and_broadcast_compressed_block(
        bx_block, self.connection, block_info, block_hash
    )