def _format_block_info_stats(self, block_info):
    """
    Build a short human-readable compression summary for a block.

    :param block_info: block info object exposing ``duration_ms`` and
        ``compression_rate``; may be None when no info is available
    :return: "Compression: <duration>, <rate>" or "" when block_info is None
    """
    if block_info is None:
        return ""
    duration_text = stats_format.duration(block_info.duration_ms)
    rate_text = stats_format.percentage(block_info.compression_rate)
    return "Compression: {}, {}".format(duration_text, rate_text)
def _calc_bdn_performance_stats(self) -> Dict[str, Any]:
    """
    Assemble BDN performance statistics for the most recent interval.

    When no interval has been recorded yet, every field is a NaN
    placeholder. A percentage field is also NaN when its denominator
    (total blocks / total transactions seen in the interval) is zero.

    :return: mapping of stat-name constants to formatted values
    """
    interval_data: Optional[GatewayBdnPerformanceStatInterval] = \
        gateway_bdn_performance_stats_service.get_most_recent_stats()
    if interval_data is None:
        # Nothing recorded yet: emit NaN for every field.
        return {
            INTERVAL_START_TIME: float("nan"),
            INTERVAL_END_TIME: float("nan"),
            BLOCKS_FROM_BDN: float("nan"),
            TX_FROM_BDN: float("nan"),
            BLOCKS_SEEN: float("nan"),
        }

    assert interval_data.end_time is not None
    stats: Dict[str, Any] = {
        INTERVAL_START_TIME: str(interval_data.start_time),
        INTERVAL_END_TIME: str(interval_data.end_time),
    }

    # Share of new blocks that arrived via the BDN rather than the
    # local blockchain node; NaN when no blocks arrived at all.
    total_blocks = (interval_data.new_blocks_received_from_bdn
                    + interval_data.new_blocks_received_from_blockchain_node)
    if total_blocks == 0:
        stats[BLOCKS_FROM_BDN] = float("nan")
    else:
        stats[BLOCKS_FROM_BDN] = stats_format.percentage(
            100 * (interval_data.new_blocks_received_from_bdn / total_blocks))

    # Same ratio for new transactions.
    total_tx = (interval_data.new_tx_received_from_bdn
                + interval_data.new_tx_received_from_blockchain_node)
    if total_tx == 0:
        stats[TX_FROM_BDN] = float("nan")
    else:
        stats[TX_FROM_BDN] = stats_format.percentage(
            100 * (interval_data.new_tx_received_from_bdn / total_tx))

    stats[BLOCKS_SEEN] = interval_data.new_blocks_seen
    return stats
def _handle_decrypted_block(
        self,
        bx_block: memoryview,
        connection: AbstractRelayConnection,
        encrypted_block_hash_hex: Optional[str] = None,
        recovered: bool = False,
        recovered_txs_source: Optional[RecoveredTxsSource] = None) -> None:
    """
    Decompress a bx block received from the BDN and hand it to the
    blockchain node, recording block statistics at every step.

    :param bx_block: compressed block payload from a relay
    :param connection: relay connection the block arrived on
    :param encrypted_block_hash_hex: hash of the matching encrypted block,
        when this payload resulted from decrypting one (stats correlation)
    :param recovered: True when this is a retry after short-id recovery
    :param recovered_txs_source: source of the recovered transactions
        (only meaningful when ``recovered`` is True)
    """
    transaction_service = self._node.get_tx_service()
    message_converter = self._node.message_converter
    assert message_converter is not None

    # Cheap header validation before attempting full decompression.
    valid_block = self._validate_compressed_block_header(bx_block)
    if not valid_block.is_valid:
        reason = valid_block.reason
        assert reason is not None
        block_stats.add_block_event_by_block_hash(
            valid_block.block_hash,
            BlockStatEventType.BLOCK_DECOMPRESSED_FAILED_VALIDATION,
            connection.network_num,
            more_info=reason)
        return

    # TODO: determine if a real block or test block. Discard if test block.
    # Only decompress when there is somewhere to forward the block.
    if self._node.remote_node_conn or self._node.has_active_blockchain_peer():
        try:
            # block_message is None when decompression could not complete
            # (unknown short ids are handled in the else-branch below).
            (block_message, block_info, unknown_sids,
             unknown_hashes) = message_converter.bx_block_to_block(
                bx_block, transaction_service)
            block_content_debug_utils.log_compressed_block_debug_info(
                transaction_service, bx_block)
        except MessageConversionError as e:
            block_stats.add_block_event_by_block_hash(
                e.msg_hash,
                BlockStatEventType.BLOCK_CONVERSION_FAILED,
                network_num=connection.network_num,
                conversion_type=e.conversion_type.value)
            # Drop any state already associated with the broken block.
            transaction_service.on_block_cleaned_up(e.msg_hash)
            connection.log_warning(log_messages.FAILED_TO_DECOMPRESS_BLOCK,
                                   e.msg_hash, e)
            return
    else:
        connection.log_warning(log_messages.LACK_BLOCKCHAIN_CONNECTION)
        return

    block_hash = block_info.block_hash
    all_sids = block_info.short_ids

    # Correlate this decompressed block with the encrypted payload it
    # came from, when applicable.
    if encrypted_block_hash_hex is not None:
        block_stats.add_block_event_by_block_hash(
            block_hash,
            BlockStatEventType.BLOCK_TO_ENC_BLOCK_MATCH,
            matching_block_hash=encrypted_block_hash_hex,
            matching_block_type=StatBlockType.ENCRYPTED.value,
            network_num=connection.network_num)
    self.cancel_hold_timeout(block_hash, connection)

    if recovered:
        block_stats.add_block_event_by_block_hash(
            block_hash, BlockStatEventType.BLOCK_RECOVERY_COMPLETED,
            network_num=connection.network_num,
            more_info=str(recovered_txs_source))

    # Duplicate of a block already seen: record stats, remember the short
    # ids, optionally cache the block data, and stop.
    if block_hash in self._node.blocks_seen.contents:
        block_stats.add_block_event_by_block_hash(
            block_hash, BlockStatEventType.BLOCK_DECOMPRESSED_IGNORE_SEEN,
            start_date_time=block_info.start_datetime,
            end_date_time=block_info.end_datetime,
            network_num=connection.network_num,
            prev_block_hash=block_info.prev_block_hash,
            original_size=block_info.original_size,
            compressed_size=block_info.compressed_size,
            txs_count=block_info.txn_count,
            blockchain_network=self._node.opts.blockchain_network,
            blockchain_protocol=self._node.opts.blockchain_protocol,
            matching_block_hash=block_info.compressed_block_hash,
            matching_block_type=StatBlockType.COMPRESSED.value,
            more_info=stats_format.duration(block_info.duration_ms))
        self._node.track_block_from_bdn_handling_ended(block_hash)
        transaction_service.track_seen_short_ids(block_hash, all_sids)
        connection.log_info("Discarding duplicate block {} from the BDN.",
                            block_hash)
        if block_message is not None:
            self._node.on_block_received_from_bdn(block_hash, block_message)
            # Cache the full block data if the queuing service lacks it.
            if self._node.block_queuing_service_manager.get_block_data(
                    block_hash) is None:
                self._node.block_queuing_service_manager.store_block_data(
                    block_hash, block_message)
        return

    if not recovered:
        connection.log_info("Received block {} from the BDN.", block_hash)
    else:
        connection.log_info("Successfully recovered block {}.", block_hash)

    if block_message is not None:
        # Fully decompressed: record success stats, queue (or update a
        # previously queued recovery placeholder), and publish to feeds.
        compression_rate = block_info.compression_rate
        assert compression_rate is not None
        block_stats.add_block_event_by_block_hash(
            block_hash,
            BlockStatEventType.BLOCK_DECOMPRESSED_SUCCESS,
            start_date_time=block_info.start_datetime,
            end_date_time=block_info.end_datetime,
            network_num=connection.network_num,
            prev_block_hash=block_info.prev_block_hash,
            original_size=block_info.original_size,
            compressed_size=block_info.compressed_size,
            txs_count=block_info.txn_count,
            blockchain_network=self._node.opts.blockchain_network,
            blockchain_protocol=self._node.opts.blockchain_protocol,
            matching_block_hash=block_info.compressed_block_hash,
            matching_block_type=StatBlockType.COMPRESSED.value,
            more_info="Compression rate {}, Decompression time {}, "
                      "Queued behind {} blocks".format(
                          stats_format.percentage(compression_rate),
                          stats_format.duration(block_info.duration_ms),
                          self._node.block_queuing_service_manager.
                          get_length_of_each_queuing_service_stats_format()))

        self._on_block_decompressed(block_message)
        if recovered or self._node.block_queuing_service_manager.is_in_any_queuing_service(
                block_hash):
            self._node.block_queuing_service_manager.update_recovered_block(
                block_hash, block_message)
        else:
            self._node.block_queuing_service_manager.push(
                block_hash, block_message)

        gateway_bdn_performance_stats_service.log_block_from_bdn()
        self._node.on_block_received_from_bdn(block_hash, block_message)
        transaction_service.track_seen_short_ids(block_hash, all_sids)

        self._node.publish_block(None, block_hash, block_message,
                                 FeedSource.BDN_SOCKET)
        self._node.log_blocks_network_content(self._node.network_num,
                                              block_message)
    else:
        # Decompression incomplete: some short ids / hashes are unknown.
        # Kick off transaction recovery and queue a placeholder.
        if self._node.block_queuing_service_manager.is_in_any_queuing_service(
                block_hash) and not recovered:
            connection.log_trace(
                "Handling already queued block again. Ignoring.")
            return

        self._node.block_recovery_service.add_block(
            bx_block, block_hash, unknown_sids, unknown_hashes)
        block_stats.add_block_event_by_block_hash(
            block_hash,
            BlockStatEventType.BLOCK_DECOMPRESSED_WITH_UNKNOWN_TXS,
            start_date_time=block_info.start_datetime,
            end_date_time=block_info.end_datetime,
            network_num=connection.network_num,
            prev_block_hash=block_info.prev_block_hash,
            original_size=block_info.original_size,
            compressed_size=block_info.compressed_size,
            txs_count=block_info.txn_count,
            blockchain_network=self._node.opts.blockchain_network,
            blockchain_protocol=self._node.opts.blockchain_protocol,
            matching_block_hash=block_info.compressed_block_hash,
            matching_block_type=StatBlockType.COMPRESSED.value,
            more_info="{} sids, {} hashes, [{},...]".format(
                len(unknown_sids), len(unknown_hashes), unknown_sids[:5]))

        connection.log_info(
            "Block {} requires short id recovery. Querying BDN...",
            block_hash)
        self.start_transaction_recovery(unknown_sids, unknown_hashes,
                                        block_hash, connection)
        if recovered:
            # should never happen –– this should not be called on blocks that have not recovered
            connection.log_error(log_messages.BLOCK_DECOMPRESSION_FAILURE,
                                 block_hash)
        else:
            self._node.block_queuing_service_manager.push(
                block_hash, waiting_for_recovery=True)
def _process_and_broadcast_block(
        self, block_message,
        connection: AbstractGatewayBlockchainConnection) -> None:
    """
    Compresses and propagates block message if enabled, else return.

    On conversion failure the error is recorded in block stats and logged,
    and nothing is broadcast. Optionally dumps the short-id -> tx-hash
    mapping to disk when ``dump_short_id_mapping_compression`` is set.

    :param block_message: block message to propagate
    :param connection: receiving connection (AbstractBlockchainConnection)
    """
    block_hash = block_message.block_hash()
    message_converter = self._node.message_converter
    assert message_converter is not None
    try:
        bx_block, block_info = message_converter.block_to_bx_block(
            block_message, self._node.get_tx_service(),
            self._node.opts.enable_block_compression,
            self._node.network.min_tx_age_seconds)
    except MessageConversionError as e:
        block_stats.add_block_event_by_block_hash(
            e.msg_hash,
            BlockStatEventType.BLOCK_CONVERSION_FAILED,
            network_num=connection.network_num,
            conversion_type=e.conversion_type.value)
        connection.log_error(log_messages.BLOCK_COMPRESSION_FAIL,
                             e.msg_hash, e)
        return

    # Short ids the converter declined to use (e.g. too-new transactions).
    if block_info.ignored_short_ids:
        assert block_info.ignored_short_ids is not None
        logger.debug("Ignoring {} new SIDs for {}: {}",
                     len(block_info.ignored_short_ids),
                     block_info.block_hash,
                     block_info.ignored_short_ids)

    compression_rate = block_info.compression_rate
    assert compression_rate is not None
    block_stats.add_block_event_by_block_hash(
        block_hash,
        BlockStatEventType.BLOCK_COMPRESSED,
        start_date_time=block_info.start_datetime,
        end_date_time=block_info.end_datetime,
        network_num=connection.network_num,
        prev_block_hash=block_info.prev_block_hash,
        original_size=block_info.original_size,
        txs_count=block_info.txn_count,
        blockchain_network=self._node.opts.blockchain_network,
        blockchain_protocol=self._node.opts.blockchain_protocol,
        matching_block_hash=block_info.compressed_block_hash,
        matching_block_type=StatBlockType.COMPRESSED.value,
        more_info="Compression: {}->{} bytes, {}, {}; Tx count: {}".format(
            block_info.original_size, block_info.compressed_size,
            stats_format.percentage(compression_rate),
            stats_format.duration(block_info.duration_ms),
            block_info.txn_count))

    # Debug aid: write the short-id -> tx-hash mapping used for this
    # block to a file named after the block hash.
    if self._node.opts.dump_short_id_mapping_compression:
        mapping = {}
        for short_id in block_info.short_ids:
            tx_hash = self._node.get_tx_service().get_transaction(
                short_id).hash
            assert tx_hash is not None
            mapping[short_id] = convert.bytes_to_hex(tx_hash.binary)
        with open(
                f"{self._node.opts.dump_short_id_mapping_compression_path}/"
                f"{convert.bytes_to_hex(block_hash.binary)}", "w") as f:
            f.write(str(mapping))

    self._process_and_broadcast_compressed_block(bx_block, connection,
                                                 block_info, block_hash)
    self._node.log_blocks_network_content(self._node.network_num,
                                          block_message)
def process_compact_block(
        self,
        block_message: CompactBlockBtcMessage,
        connection: BtcNodeConnection
) -> CompactBlockCompressionResult:
    """
    Process compact block for processing on timeout if hold message received.
    If no hold exists, compress and broadcast block immediately.

    :param block_message: compact block message to process
    :param connection: receiving connection (AbstractBlockchainConnection)
    :return: conversion result; on failure it carries the indices of the
        short ids that could not be resolved
    """
    block_hash = block_message.block_hash()
    parse_result = self._node.message_converter.compact_block_to_bx_block(  # pyre-ignore
        block_message,
        self._node.get_tx_service()
    )
    block_info = parse_result.block_info
    if parse_result.success:
        block_stats.add_block_event_by_block_hash(
            block_hash,
            BlockStatEventType.COMPACT_BLOCK_COMPRESSED_SUCCESS,
            network_num=connection.network_num,
            start_date_time=block_info.start_datetime,
            end_date_time=block_info.end_datetime,
            # stat event takes seconds; block_info stores milliseconds
            duration=block_info.duration_ms / 1000,
            success=parse_result.success,
            txs_count=parse_result.block_info.txn_count,
            prev_block_hash=parse_result.block_info.prev_block_hash,
            original_size=block_info.original_size,
            more_info="Compression: {}->{} bytes, {}, {}; Tx count: {}".format(
                block_info.original_size, block_info.compressed_size,
                stats_format.percentage(block_info.compression_rate),
                stats_format.duration(block_info.duration_ms),
                block_info.txn_count)
        )
        self._node.block_cleanup_service.on_new_block_received(
            block_hash, block_message.prev_block_hash())
        self._process_and_broadcast_compressed_block(
            parse_result.bx_block,
            connection,
            parse_result.block_info,
            block_hash
        )
    else:
        # Conversion failed: some short ids were unknown. Record the
        # failure with the wall-clock duration measured here.
        missing_indices = parse_result.missing_indices
        missing_indices_count = 0 if missing_indices is None else len(missing_indices)
        start_datetime = block_info.start_datetime
        # NOTE(review): utcnow() is naive; assumes block_info.start_datetime
        # is also naive UTC — confirm before changing either side.
        end_datetime = datetime.utcnow()
        duration = (end_datetime - start_datetime).total_seconds()
        block_stats.add_block_event_by_block_hash(
            block_hash,
            BlockStatEventType.COMPACT_BLOCK_COMPRESSED_FAILED,
            network_num=connection.network_num,
            start_date_time=start_datetime,
            end_date_time=end_datetime,
            duration=duration,
            success=parse_result.success,
            missing_short_id_count=missing_indices_count,
            more_info="{:.2f}ms".format(
                duration * 1000
            )
        )
        logger.warning(log_messages.UNKNOWN_SHORT_IDS,
                       missing_indices_count)
    return parse_result
def msg_consensus(self, msg: OntConsensusMessage):
    """
    Handle an Ontology consensus message from the local blockchain node:
    for block-proposal messages, compress the block and broadcast it to
    the BDN (after sending a block-holding message to peers).

    :param msg: consensus message received from the blockchain node
    """
    # Only process when consensus relaying is enabled for this gateway.
    if not self.node.opts.is_consensus:
        return
    # Only block-proposal consensus messages carry a block to relay.
    if msg.consensus_data_type() != ont_constants.BLOCK_PROPOSAL_CONSENSUS_MESSAGE_TYPE:
        return
    block_hash = msg.block_hash()
    node = self.connection.node
    if not node.should_process_block_hash(block_hash):
        return

    node.block_cleanup_service.on_new_block_received(
        block_hash, msg.prev_block_hash())
    block_stats.add_block_event_by_block_hash(
        block_hash,
        BlockStatEventType.BLOCK_RECEIVED_FROM_BLOCKCHAIN_NODE,
        network_num=self.connection.network_num,
        broadcast_type=BroadcastMessageType.CONSENSUS,
        more_info="Protocol: {}, Network: {}".format(
            node.opts.blockchain_protocol,
            node.opts.blockchain_network
        ),
        msg_size=len(msg.rawbytes())
    )

    # Duplicate of a block already seen: record and discard.
    if block_hash in self.connection.node.blocks_seen.contents:
        self.node.on_block_seen_by_blockchain_node(block_hash, self.connection)
        block_stats.add_block_event_by_block_hash(
            block_hash,
            BlockStatEventType.BLOCK_RECEIVED_FROM_BLOCKCHAIN_NODE_IGNORE_SEEN,
            network_num=self.connection.network_num,
            broadcast_type=BroadcastMessageType.CONSENSUS)
        self.connection.log_info(
            "Discarding duplicate consensus block {} from local blockchain node.",
            block_hash
        )
        return

    node.track_block_from_node_handling_started(block_hash)
    self.connection.log_info(
        "Processing consensus block {} from local blockchain node.",
        block_hash
    )

    # Broadcast BlockHoldingMessage through relays and gateways
    conns = self.node.broadcast(
        BlockHoldingMessage(block_hash, self.node.network_num),
        broadcasting_conn=self.connection,
        prepend_to_queue=True,
        connection_types=[ConnectionType.RELAY_BLOCK, ConnectionType.GATEWAY])
    if len(conns) > 0:
        block_stats.add_block_event_by_block_hash(
            block_hash,
            BlockStatEventType.BLOCK_HOLD_SENT_BY_GATEWAY_TO_PEERS,
            network_num=self.node.network_num,
            broadcast_type=BroadcastMessageType.CONSENSUS,
            peers=conns
        )

    # Compress the consensus block for BDN propagation.
    try:
        bx_block, block_info = self.node.consensus_message_converter.block_to_bx_block(
            msg, self.node.get_tx_service(),
            self.node.opts.enable_block_compression,
            self.node.network.min_tx_age_seconds
        )
    except MessageConversionError as e:
        block_stats.add_block_event_by_block_hash(
            e.msg_hash,
            BlockStatEventType.BLOCK_CONVERSION_FAILED,
            network_num=self.connection.network_num,
            broadcast_type=BroadcastMessageType.CONSENSUS,
            conversion_type=e.conversion_type.value
        )
        self.connection.log_error(
            log_messages.BLOCK_COMPRESSION_FAIL_ONT_CONSENSUS,
            e.msg_hash, e)
        return

    # Short ids the converter declined to use (e.g. too-new transactions).
    if block_info.ignored_short_ids:
        self.connection.log_debug(
            "Ignoring {} new SIDs for {}: {}",
            len(block_info.ignored_short_ids),
            block_info.block_hash,
            block_info.ignored_short_ids
        )

    block_stats.add_block_event_by_block_hash(
        block_hash,
        BlockStatEventType.BLOCK_COMPRESSED,
        start_date_time=block_info.start_datetime,
        end_date_time=block_info.end_datetime,
        network_num=self.connection.network_num,
        broadcast_type=BroadcastMessageType.CONSENSUS,
        prev_block_hash=block_info.prev_block_hash,
        original_size=block_info.original_size,
        txs_count=block_info.txn_count,
        blockchain_network=self.node.opts.blockchain_network,
        blockchain_protocol=self.node.opts.blockchain_protocol,
        matching_block_hash=block_info.compressed_block_hash,
        matching_block_type=StatBlockType.COMPRESSED.value,
        more_info="Consensus compression: {}->{} bytes, {}, {}; "
                  "Tx count: {}".format(
                      block_info.original_size,
                      block_info.compressed_size,
                      stats_format.percentage(block_info.compression_rate),
                      stats_format.duration(block_info.duration_ms),
                      block_info.txn_count
                  )
    )
    self.node.block_processing_service._process_and_broadcast_compressed_block(
        bx_block, self.connection, block_info, block_hash
    )
def _handle_decrypted_consensus_block(
        self,
        bx_block: memoryview,
        connection: AbstractRelayConnection,
        encrypted_block_hash_hex: Optional[str] = None,
        recovered: bool = False,
        recovered_txs_source: Optional[RecoveredTxsSource] = None
):
    """
    Decompress a consensus bx block received from the BDN and forward it
    to the blockchain node, mirroring ``_handle_decrypted_block`` but
    tagging every stat event with the CONSENSUS broadcast type.

    Fix: the three BLOCK_DECOMPRESSED_* stat events previously passed
    ``opts.blockchain_protocol`` as ``blockchain_network`` and vice versa
    (swapped relative to the non-consensus handler and msg_consensus);
    the keyword arguments now receive the matching values.

    :param bx_block: compressed consensus block payload from a relay
    :param connection: relay connection the block arrived on
    :param encrypted_block_hash_hex: hash of the matching encrypted block,
        when this payload resulted from decrypting one (stats correlation)
    :param recovered: True when this is a retry after short-id recovery
    :param recovered_txs_source: source of the recovered transactions
        (only meaningful when ``recovered`` is True)
    """
    transaction_service = self._node.get_tx_service()
    # Only decompress when there is somewhere to forward the block.
    if self._node.has_active_blockchain_peer() or self._node.remote_node_conn:
        try:
            # block_message is None when decompression could not complete
            # (unknown short ids are handled in the else-branch below).
            block_message, block_info, unknown_sids, unknown_hashes = \
                self._node.consensus_message_converter.bx_block_to_block(
                    bx_block, transaction_service)
        except MessageConversionError as e:
            block_stats.add_block_event_by_block_hash(
                e.msg_hash,
                BlockStatEventType.BLOCK_CONVERSION_FAILED,
                network_num=connection.network_num,
                broadcast_type=BroadcastMessageType.CONSENSUS,
                conversion_type=e.conversion_type.value
            )
            # Drop any state already associated with the broken block.
            transaction_service.on_block_cleaned_up(e.msg_hash)
            connection.log_warning(
                log_messages.FAILED_TO_DECOMPRESS_BLOCK_ONT_CONSENSUS,
                e.msg_hash, e)
            return
    else:
        connection.log_warning(
            log_messages.LACK_BLOCKCHAIN_CONNECTION_ONT_CONSENSUS)
        return

    block_hash = block_info.block_hash
    all_sids = block_info.short_ids

    # Correlate this decompressed block with the encrypted payload it
    # came from, when applicable.
    if encrypted_block_hash_hex is not None:
        block_stats.add_block_event_by_block_hash(
            block_hash,
            BlockStatEventType.BLOCK_TO_ENC_BLOCK_MATCH,
            matching_block_hash=encrypted_block_hash_hex,
            matching_block_type=StatBlockType.ENCRYPTED.value,
            network_num=connection.network_num,
            broadcast_type=BroadcastMessageType.CONSENSUS)
    self.cancel_hold_timeout(block_hash, connection)

    if recovered:
        block_stats.add_block_event_by_block_hash(
            block_hash,
            BlockStatEventType.BLOCK_RECOVERY_COMPLETED,
            network_num=connection.network_num,
            broadcast_type=BroadcastMessageType.CONSENSUS,
            more_info=str(recovered_txs_source))

    # Duplicate of a block already seen: record stats, remember the
    # short ids, and stop.
    if block_hash in self._node.blocks_seen.contents:
        block_stats.add_block_event_by_block_hash(
            block_hash,
            BlockStatEventType.BLOCK_DECOMPRESSED_IGNORE_SEEN,
            start_date_time=block_info.start_datetime,
            end_date_time=block_info.end_datetime,
            network_num=connection.network_num,
            broadcast_type=BroadcastMessageType.CONSENSUS,
            prev_block_hash=block_info.prev_block_hash,
            original_size=block_info.original_size,
            compressed_size=block_info.compressed_size,
            txs_count=block_info.txn_count,
            # fixed: network/protocol values were previously swapped
            blockchain_network=self._node.opts.blockchain_network,
            blockchain_protocol=self._node.opts.blockchain_protocol,
            matching_block_hash=block_info.compressed_block_hash,
            matching_block_type=StatBlockType.COMPRESSED.value,
            more_info=stats_format.duration(block_info.duration_ms))
        self._node.track_block_from_bdn_handling_ended(block_hash)
        transaction_service.track_seen_short_ids(block_hash, all_sids)
        connection.log_info(
            "Discarding duplicate consensus block {} from the BDN.",
            block_hash
        )
        return

    if not recovered:
        connection.log_info("Received consensus block {} from the BDN.",
                            block_hash)
    else:
        connection.log_info("Successfully recovered consensus block {}.",
                            block_hash)

    if block_message is not None:
        # Fully decompressed: record success stats and queue the block
        # (or update a previously queued recovery placeholder).
        block_stats.add_block_event_by_block_hash(
            block_hash,
            BlockStatEventType.BLOCK_DECOMPRESSED_SUCCESS,
            start_date_time=block_info.start_datetime,
            end_date_time=block_info.end_datetime,
            network_num=connection.network_num,
            broadcast_type=BroadcastMessageType.CONSENSUS,
            prev_block_hash=block_info.prev_block_hash,
            original_size=block_info.original_size,
            compressed_size=block_info.compressed_size,
            txs_count=block_info.txn_count,
            # fixed: network/protocol values were previously swapped
            blockchain_network=self._node.opts.blockchain_network,
            blockchain_protocol=self._node.opts.blockchain_protocol,
            matching_block_hash=block_info.compressed_block_hash,
            matching_block_type=StatBlockType.COMPRESSED.value,
            peer=connection.peer_desc,
            more_info="Consensus compression rate {}, Decompression time {}, "
                      "Queued behind {} blocks".format(
                          stats_format.percentage(block_info.compression_rate),
                          stats_format.duration(block_info.duration_ms),
                          self._node.block_queueing_service_manager.get_length_of_each_queuing_service_stats_format()))

        # NOTE(review): `block_queueing_service_manager` is spelled
        # differently from `block_queuing_service_manager` used by the
        # non-consensus handler — confirm the attribute name on the node.
        if recovered or block_hash in self._node.block_queueing_service_manager:
            self._node.block_queueing_service_manager.update_recovered_block(
                block_hash, block_message)
        else:
            self._node.block_queueing_service_manager.push(
                block_hash, block_message)
        self._node.block_recovery_service.cancel_recovery_for_block(block_hash)
        transaction_service.track_seen_short_ids(block_hash, all_sids)
    else:
        # Decompression incomplete: some short ids / hashes are unknown.
        # Kick off transaction recovery and queue a placeholder.
        if block_hash in self._node.block_queueing_service_manager and not recovered:
            connection.log_trace(
                "Handling already queued consensus block again. Ignoring.")
            return

        self._node.block_recovery_service.add_block(
            bx_block, block_hash, unknown_sids, unknown_hashes)
        block_stats.add_block_event_by_block_hash(
            block_hash,
            BlockStatEventType.BLOCK_DECOMPRESSED_WITH_UNKNOWN_TXS,
            start_date_time=block_info.start_datetime,
            end_date_time=block_info.end_datetime,
            network_num=connection.network_num,
            broadcast_type=BroadcastMessageType.CONSENSUS,
            prev_block_hash=block_info.prev_block_hash,
            original_size=block_info.original_size,
            compressed_size=block_info.compressed_size,
            txs_count=block_info.txn_count,
            # fixed: network/protocol values were previously swapped
            blockchain_network=self._node.opts.blockchain_network,
            blockchain_protocol=self._node.opts.blockchain_protocol,
            matching_block_hash=block_info.compressed_block_hash,
            matching_block_type=StatBlockType.COMPRESSED.value,
            more_info="{} sids, {} hashes".format(
                len(unknown_sids), len(unknown_hashes)))
        connection.log_info(
            "Consensus block {} requires short id recovery. Querying BDN...",
            block_hash)
        self.start_transaction_recovery(unknown_sids, unknown_hashes,
                                        block_hash, connection)
        if recovered:
            # should never happen –– this should not be called on blocks that have not recovered
            connection.log_error(
                log_messages.BLOCK_DECOMPRESSION_FAILURE_ONT_CONSENSUS,
                block_hash)
        else:
            self._node.block_queueing_service_manager.push(
                block_hash, waiting_for_recovery=True)