def block_to_bx_block(self, block_msg: BlockOntMessage, tx_service: ExtensionTransactionService) -> \
        Tuple[memoryview, BlockInfo]:
    compress_start_datetime = datetime.utcnow()
    compress_start_timestamp = time.time()
    self._default_block_size = max(self._default_block_size, len(block_msg.buf))
    tsk = self.compression_tasks.borrow_task()
    tsk.init(tpe.InputBytes(block_msg.buf), tx_service.proxy)
    try:
        task_pool_proxy.run_task(tsk)
    except tpe.AggregatedException as e:
        self.compression_tasks.return_task(tsk)
        raise message_conversion_error.btc_block_compression_error(block_msg.block_hash(), e)
    bx_block = tsk.bx_block()
    block = memoryview(bx_block)
    compressed_size = len(block)
    original_size = len(block_msg.rawbytes())
    block_hash = OntObjectHash(binary=convert.hex_to_bytes(tsk.block_hash().hex_string()))

    block_info = BlockInfo(
        block_hash,
        tsk.short_ids(),
        compress_start_datetime,
        datetime.utcnow(),
        (time.time() - compress_start_timestamp) * 1000,
        tsk.txn_count(),
        tsk.compressed_block_hash().hex_string(),
        tsk.prev_block_hash().hex_string(),
        original_size,
        compressed_size,
        100 - float(compressed_size) / original_size * 100
    )
    self.compression_tasks.return_task(tsk)
    return block, block_info
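# A small, self-contained illustration (assumed numbers only) of the compression
# metric recorded as the last BlockInfo field above: the percentage saved is
# 100 - compressed_size / original_size * 100. The helper name is hypothetical;
# the converter inlines this arithmetic.
def _compression_savings_percent(original_size: int, compressed_size: int) -> float:
    return 100 - float(compressed_size) / original_size * 100


assert round(_compression_savings_percent(1000, 250), 1) == 75.0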
def _get_sample_block(self):
    root_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    with open(os.path.join(root_dir, "samples/ont_sample_block.txt")) as sample_file:
        ont_block = sample_file.read().strip("\n")
    buf = bytearray(convert.hex_to_bytes(ont_block))
    parsed_block = BlockOntMessage(buf=buf)
    self.magic = parsed_block.magic()
    self.version = parsed_block.version()
    return parsed_block
def clean_block_transactions(
        self, block_msg: BlockOntMessage, transaction_service: TransactionService
) -> None:
    block_short_ids = []
    block_unknown_tx_hashes = []
    start_time = time.time()

    short_ids_count = 0
    unknown_tx_hashes_count = 0
    transactions_processed = 0

    tx_hash_to_contents_len_before_cleanup = transaction_service.get_tx_hash_to_contents_len()
    short_id_count_before_cleanup = transaction_service.get_short_id_count()

    for tx in block_msg.txns():
        tx_hash = OntObjectHash(buf=crypto.double_sha256(tx), length=ont_constants.ONT_HASH_LEN)
        short_ids = transaction_service.remove_transaction_by_tx_hash(tx_hash, force=True)
        if short_ids is None:
            unknown_tx_hashes_count += 1
            block_unknown_tx_hashes.append(tx_hash)
        else:
            short_ids_count += len(short_ids)
            block_short_ids.extend(short_ids)
        transactions_processed += 1

    block_hash = block_msg.block_hash()
    transaction_service.on_block_cleaned_up(block_hash)
    end_time = time.time()

    duration = end_time - start_time
    tx_hash_to_contents_len_after_cleanup = transaction_service.get_tx_hash_to_contents_len()
    short_id_count_after_cleanup = transaction_service.get_short_id_count()

    logger.debug(
        "Finished cleaning up block {}. Processed {} hashes, {} of which were unknown, and cleaned up {} "
        "short ids. Took {:.3f}s.",
        block_hash, transactions_processed, unknown_tx_hashes_count, short_ids_count, duration)

    transaction_service.log_block_transaction_cleanup_stats(
        block_hash, block_msg.txn_count(),
        tx_hash_to_contents_len_before_cleanup, tx_hash_to_contents_len_after_cleanup,
        short_id_count_before_cleanup, short_id_count_after_cleanup)

    self._block_hash_marked_for_cleanup.discard(block_hash)
    self.node.post_block_cleanup_tasks(
        block_hash=block_hash,
        short_ids=block_short_ids,
        unknown_tx_hashes=block_unknown_tx_hashes)
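# The transaction hashes above come from crypto.double_sha256. As a reference point,
# double SHA-256 is SHA-256 applied twice, which can be sketched with the standard
# library alone (illustrative helper; assumed to match the project's crypto.double_sha256).
import hashlib


def _double_sha256_sketch(payload: bytes) -> bytes:
    return hashlib.sha256(hashlib.sha256(payload).digest()).digest()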
def get_sample_block():
    root_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    with open(os.path.join(root_dir, "ont_sample_block.txt")) as sample_file:
        ont_block = sample_file.read().strip("\n")
    buf = bytearray(convert.hex_to_bytes(ont_block))
    parsed_block = BlockOntMessage(buf=buf)
    return parsed_block
def msg_block(self, msg: BlockOntMessage) -> None:
    block_stats.add_block_event_by_block_hash(
        msg.block_hash(),
        BlockStatEventType.REMOTE_BLOCK_RECEIVED_BY_GATEWAY,
        network_num=self.connection.network_num,
        more_info="Protocol: {}, Network: {}".format(
            self.connection.node.opts.blockchain_protocol,
            self.connection.node.opts.blockchain_network))
    return self.msg_proxy_response(msg)
def build_ont_block(block_pieces: Deque[Union[bytearray, memoryview]], size: int) -> Tuple[BlockOntMessage, int]:
    ont_block = bytearray(size - ont_constants.ONT_HASH_LEN)
    offset = 0
    for piece in block_pieces:
        next_offset = offset + len(piece)
        ont_block[offset:next_offset] = piece
        offset = next_offset
    return BlockOntMessage(buf=ont_block), offset
def test_msg_block_too_old(self):
    block_timestamp = int(time.time()) - 1 - \
        self.node.opts.blockchain_ignore_block_interval_count * \
        self.node.opts.blockchain_block_interval
    message = BlockOntMessage(0, 0, self.HASH, self.HASH, self.HASH, block_timestamp, 0, 0, bytes(10),
                              bytes(20), [bytes(33)] * 5, [bytes(2)] * 3, [bytes(32)] * 5, self.HASH)
    self.sut.msg_block(message)
    self.node.block_processing_service.queue_block_for_processing.assert_not_called()
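# A hedged sketch of the staleness boundary this test exercises: the block timestamp is
# set one second behind now - blockchain_ignore_block_interval_count * blockchain_block_interval,
# so the gateway should drop it instead of queuing it for processing. The helper below is
# illustrative only and is not the gateway's actual check.
import time


def _is_block_too_old(block_timestamp: int, ignore_block_interval_count: int, block_interval: int) -> bool:
    cutoff = int(time.time()) - ignore_block_interval_count * block_interval
    return block_timestamp < cutoff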
def parse_bx_block_header(bx_block: memoryview, block_pieces: Deque[Union[bytearray, memoryview]]) -> \
        BlockHeaderInfo:
    block_offsets = compact_block_short_ids_serializer.get_bx_block_offsets(bx_block)
    short_ids, short_ids_len = compact_block_short_ids_serializer.deserialize_short_ids_from_buffer(
        bx_block,
        block_offsets.short_id_offset
    )

    reconstructed_block_message = BlockOntMessage(
        buf=bx_block[block_offsets.block_begin_offset + ont_constants.ONT_HASH_LEN + 1:])
    block_hash = reconstructed_block_message.block_hash()
    txn_count = reconstructed_block_message.txn_count()
    offset = reconstructed_block_message.txn_offset() + block_offsets.block_begin_offset + \
        ont_constants.ONT_HASH_LEN + 1

    # Add header piece
    block_pieces.append(bx_block[block_offsets.block_begin_offset + ont_constants.ONT_HASH_LEN + 1:offset])
    return BlockHeaderInfo(block_offsets, short_ids, short_ids_len, block_hash, offset, txn_count)
def msg_block(self, msg: BlockOntMessage) -> None:
    block_hash = msg.block_hash()
    if not self.node.should_process_block_hash(block_hash):
        return

    if self.node.block_cleanup_service.is_marked_for_cleanup(block_hash):
        self.connection.log_trace("Marked block for cleanup: {}", block_hash)
        self.node.block_cleanup_service.clean_block_transactions(
            transaction_service=self.node.get_tx_service(),
            block_msg=msg)
    else:
        self.process_msg_block(msg)

    # After receiving a block message, send an INV message for the same block to the Ontology node.
    # This is needed to update the Synced Headers value of the gateway peer on the Ontology node.
    # If Synced Headers is not up to date, the Ontology node does not push compact blocks to the gateway.
    inv_msg = InvOntMessage(magic=self.node.opts.blockchain_net_magic,
                            inv_type=InventoryOntType.MSG_BLOCK, blocks=[block_hash])
    self.connection.enqueue_msg(inv_msg)

    self.node.update_current_block_height(msg.height(), block_hash)
def test_peek_message_success_all_types(self):
    self.get_message_preview_successfully(self.VERSION_ONT_MESSAGE, VersionOntMessage.MESSAGE_TYPE, 83)
    self.get_message_preview_successfully(VerAckOntMessage(self.MAGIC, True), VerAckOntMessage.MESSAGE_TYPE, 1)
    self.get_message_preview_successfully(PingOntMessage(self.MAGIC), PingOntMessage.MESSAGE_TYPE, 8)
    self.get_message_preview_successfully(PongOntMessage(self.MAGIC, 123), PongOntMessage.MESSAGE_TYPE, 8)
    self.get_message_preview_successfully(GetAddrOntMessage(self.MAGIC), GetAddrOntMessage.MESSAGE_TYPE, 0)
    self.get_message_preview_successfully(
        AddrOntMessage(self.MAGIC, [(int(time.time()), 123, "127.0.0.1", 20300, 20200, 1234)]),
        AddrOntMessage.MESSAGE_TYPE, 52)
    self.get_message_preview_successfully(
        OntConsensusMessage(self.MAGIC, self.VERSION, bytes(20)), OntConsensusMessage.MESSAGE_TYPE, 24)
    self.get_message_preview_successfully(
        InvOntMessage(self.MAGIC, InventoryOntType.MSG_TX, [self.HASH, self.HASH]),
        InvOntMessage.MESSAGE_TYPE, 69)
    self.get_message_preview_successfully(
        GetDataOntMessage(self.MAGIC, 1, self.HASH), GetDataOntMessage.MESSAGE_TYPE, 33)
    self.get_message_preview_successfully(
        GetHeadersOntMessage(self.MAGIC, 1, self.HASH, self.HASH), GetHeadersOntMessage.MESSAGE_TYPE, 65)
    self.get_message_preview_successfully(
        GetBlocksOntMessage(self.MAGIC, 1, self.HASH, self.HASH), GetBlocksOntMessage.MESSAGE_TYPE, 65)
    self.get_message_preview_successfully(
        TxOntMessage(self.MAGIC, self.VERSION, bytes(20)), TxOntMessage.MESSAGE_TYPE, 21)
    self.get_message_preview_successfully(
        BlockOntMessage(self.MAGIC, self.VERSION, self.HASH, self.HASH, self.HASH, 0, 0, 0, bytes(10),
                        bytes(20), [bytes(33)] * 5, [bytes(2)] * 3, [bytes(32)] * 5, self.HASH),
        BlockOntMessage.MESSAGE_TYPE, 524)
    self.get_message_preview_successfully(
        HeadersOntMessage(self.MAGIC, [bytes(1)] * 2), HeadersOntMessage.MESSAGE_TYPE, 6)
    self.get_message_preview_successfully(
        NotFoundOntMessage(self.MAGIC, self.HASH), NotFoundOntMessage.MESSAGE_TYPE, 32)
def bx_block_to_block(
        self, bx_block_msg: memoryview, tx_service: ExtensionTransactionService
) -> BlockDecompressionResult:
    decompress_start_datetime = datetime.utcnow()
    decompress_start_timestamp = time.time()
    tsk = self.decompression_tasks.borrow_task()
    tsk.init(tpe.InputBytes(bx_block_msg), tx_service.proxy)
    try:
        task_pool_proxy.run_task(tsk)
    except tpe.AggregatedException as e:
        self.decompression_tasks.return_task(tsk)
        header_info = ont_normal_message_converter.parse_bx_block_header(bx_block_msg, deque())
        raise message_conversion_error.btc_block_decompression_error(header_info.block_hash, e)

    total_tx_count = tsk.tx_count()
    unknown_tx_hashes = [
        Sha256Hash(bytearray(unknown_tx_hash.binary()))
        for unknown_tx_hash in tsk.unknown_tx_hashes()
    ]
    unknown_tx_sids = tsk.unknown_tx_sids()
    block_hash = OntObjectHash(binary=convert.hex_to_bytes(tsk.block_hash().hex_string()))

    if tsk.success():
        ont_block_msg = BlockOntMessage(buf=memoryview(tsk.block_message()))
        logger.debug(
            "Successfully parsed block broadcast message. {} transactions "
            "in block {}", total_tx_count, block_hash)
    else:
        ont_block_msg = None
        logger.debug(
            "Block recovery needed for {}. Missing {} sids, {} tx hashes. "
            "Total txs in block: {}", block_hash, len(unknown_tx_sids),
            len(unknown_tx_hashes), total_tx_count)

    block_info = get_block_info(
        bx_block_msg,
        block_hash,
        tsk.short_ids(),
        decompress_start_datetime,
        decompress_start_timestamp,
        total_tx_count,
        ont_block_msg
    )
    self.decompression_tasks.return_task(tsk)
    return BlockDecompressionResult(ont_block_msg, block_info, unknown_tx_sids, unknown_tx_hashes)
def test_parse_message_success_all_types(self):
    self.create_message_successfully(self.VERSION_ONT_MESSAGE, VersionOntMessage)
    self.create_message_successfully(VerAckOntMessage(self.MAGIC, False), VerAckOntMessage)
    self.create_message_successfully(PingOntMessage(self.MAGIC), PingOntMessage)
    self.create_message_successfully(PongOntMessage(self.MAGIC, 123), PongOntMessage)
    self.create_message_successfully(GetAddrOntMessage(self.MAGIC), GetAddrOntMessage)
    self.create_message_successfully(
        AddrOntMessage(self.MAGIC, [(int(time.time()), 123, "127.0.0.1", 20300, 20200, 1234)]),
        AddrOntMessage)
    self.create_message_successfully(
        OntConsensusMessage(self.MAGIC, self.VERSION, bytes(20)), OntConsensusMessage)
    self.create_message_successfully(
        InvOntMessage(self.MAGIC, InventoryOntType.MSG_TX, [self.HASH, self.HASH]), InvOntMessage)
    self.create_message_successfully(GetDataOntMessage(self.MAGIC, 1, self.HASH), GetDataOntMessage)
    self.create_message_successfully(
        GetHeadersOntMessage(self.MAGIC, 1, self.HASH, self.HASH), GetHeadersOntMessage)
    self.create_message_successfully(
        GetBlocksOntMessage(self.MAGIC, 1, self.HASH, self.HASH), GetBlocksOntMessage)
    self.create_message_successfully(TxOntMessage(self.MAGIC, self.VERSION, bytes(20)), TxOntMessage)
    self.create_message_successfully(
        BlockOntMessage(self.MAGIC, self.VERSION, self.HASH, self.HASH, self.HASH, 0, 0, 0, bytes(10),
                        bytes(20), [bytes(33)] * 5, [bytes(2)] * 3, [bytes(32)] * 5, self.HASH),
        BlockOntMessage)
    self.create_message_successfully(HeadersOntMessage(self.MAGIC, [bytes(1)] * 2), HeadersOntMessage)
    self.create_message_successfully(NotFoundOntMessage(self.MAGIC, self.HASH), NotFoundOntMessage)
def bx_block_to_block(self, bx_block_msg: memoryview, tx_service: TransactionService) -> BlockDecompressionResult:
    start_datetime = datetime.utcnow()
    start_time = time.time()
    block_msg = BlockOntMessage(buf=bytearray(bx_block_msg))
    block_info = BlockInfo(
        block_msg.block_hash(),
        [],
        start_datetime,
        datetime.utcnow(),
        (time.time() - start_time) * 1000,
        block_msg.txn_count(),
        str(block_msg.block_hash()),
        convert.bytes_to_hex(block_msg.prev_block_hash().binary),
        len(block_msg.rawbytes()),
        len(block_msg.rawbytes()),
        0
    )
    return BlockDecompressionResult(block_msg, block_info, [], [])
def test_ont_block_to_bloxroute_block_and_back_sids_found(self):
    prev_block_hash = bytearray(crypto.bitcoin_hash(b"123"))
    prev_block = OntObjectHash(prev_block_hash, length=SHA256_HASH_LEN)
    merkle_root_hash = bytearray(crypto.bitcoin_hash(b"234"))
    merkle_root = OntObjectHash(merkle_root_hash, length=SHA256_HASH_LEN)
    txns_root_hash = bytearray(crypto.bitcoin_hash(b"345"))
    txns_root = OntObjectHash(txns_root_hash, length=SHA256_HASH_LEN)
    block_root_hash = bytearray(crypto.bitcoin_hash(b"456"))
    block_root = OntObjectHash(block_root_hash, length=SHA256_HASH_LEN)
    consensus_payload = bytes(b'111')
    next_bookkeeper = bytes(b'222')
    bookkeepers = [bytes(33)] * 5
    sig_data = [bytes(2)] * 3
    txns = []
    timestamp = 1
    height = 2
    consensus_data = 3

    ont_block = BlockOntMessage(
        self.magic, self.version, prev_block, txns_root, block_root, timestamp, height,
        consensus_data, consensus_payload, next_bookkeeper, bookkeepers, sig_data, txns, merkle_root)
    block_hash = ont_block.block_hash()

    bloxroute_block, block_info = self.ont_message_converter.block_to_bx_block(
        ont_block, self.tx_service, True, 0)
    self.assertEqual(0, block_info.txn_count)
    self.assertEqual(self.short_ids, list(block_info.short_ids))
    self.assertEqual(ont_block.block_hash(), block_info.block_hash)

    parsed_ont_block, block_info, _, _ = self.ont_message_converter.bx_block_to_block(
        bloxroute_block, self.tx_service)
    self.assertIsNotNone(block_info)
    self.assertEqual(ont_block.rawbytes().tobytes(), parsed_ont_block.rawbytes().tobytes())
    self.assertEqual(self.magic, parsed_ont_block.magic())
    self.assertEqual(prev_block_hash, parsed_ont_block.prev_block_hash().get_little_endian())
    self.assertEqual(ont_block.checksum(), parsed_ont_block.checksum())
    self.assertEqual(block_hash, parsed_ont_block.block_hash())
    self.assertEqual(block_hash.binary, block_info.block_hash.binary)
    self.assertEqual(timestamp, parsed_ont_block.timestamp())
def block_to_bx_block(self, block_msg: BlockOntMessage, tx_service: TransactionService) \
        -> Tuple[memoryview, BlockInfo]:
    start_datetime = datetime.utcnow()
    start_time = time.time()
    block_info = BlockInfo(
        block_msg.block_hash(),
        [],
        start_datetime,
        datetime.utcnow(),
        (time.time() - start_time) * 1000,
        block_msg.txn_count(),
        str(block_msg.block_hash()),
        convert.bytes_to_hex(block_msg.prev_block_hash().binary),
        len(block_msg.rawbytes()),
        len(block_msg.rawbytes()),
        0
    )
    return block_msg.rawbytes(), block_info
def clean_block_transactions(
        self, block_msg: BlockOntMessage, transaction_service: TransactionService
) -> None:
    start_datetime = datetime.utcnow()
    start_time = time.time()
    tx_hash_to_contents_len_before_cleanup = transaction_service.get_tx_hash_to_contents_len()
    cleanup_task = self.block_cleanup_tasks.borrow_task()
    tx_service = typing.cast(ExtensionTransactionService, transaction_service)
    cleanup_task.init(tpe.InputBytes(block_msg.buf), tx_service.proxy)
    init_time = time.time()
    task_pool_proxy.run_task(cleanup_task)
    task_run_time = time.time()
    unknown_tx_hashes_count = len(cleanup_task.unknown_tx_hashes())
    tx_property_fetch_time = time.time()
    short_ids = cleanup_task.short_ids()
    short_ids_fetch_time = time.time()
    short_ids_count = len(short_ids)
    tx_service.update_removed_transactions(cleanup_task.total_content_removed(), short_ids)
    remove_from_tx_service_time = time.time()
    # TODO: clean the short ids/transactions from the alarm queue after refactoring the transaction service
    block_hash = block_msg.block_hash()
    tx_service.on_block_cleaned_up(block_hash)
    tx_hash_to_contents_len_after_cleanup = transaction_service.get_tx_hash_to_contents_len()
    end_datetime = datetime.utcnow()
    end_time = time.time()

    logger.statistics({
        "type": "BlockTransactionsCleanup",
        "block_hash": repr(block_hash),
        "unknown_tx_hashes_count": unknown_tx_hashes_count,
        "short_ids_count": short_ids_count,
        "block_transactions_count": cleanup_task.txn_count(),
        "start_datetime": start_datetime,
        "end_datetime": end_datetime,
        "task_init_time": init_time - start_time,
        "task_run_time": task_run_time - init_time,
        "tx_property_fetch_time": tx_property_fetch_time - task_run_time,
        "short_ids_fetch_time": short_ids_fetch_time - tx_property_fetch_time,
        "remove_from_tx_service_time": remove_from_tx_service_time - short_ids_fetch_time,
        "duration": end_time - start_time,
        "tx_hash_to_contents_len_before_cleanup": tx_hash_to_contents_len_before_cleanup,
        "tx_hash_to_contents_len_after_cleanup": tx_hash_to_contents_len_after_cleanup,
    })
    self.block_cleanup_tasks.return_task(cleanup_task)
    self._block_hash_marked_for_cleanup.discard(block_hash)
    self.node.post_block_cleanup_tasks(
        block_hash=block_hash,
        short_ids=short_ids,
        unknown_tx_hashes=(
            Sha256Hash(convert.hex_to_bytes(tx_hash.hex_string()))
            for tx_hash in cleanup_task.unknown_tx_hashes()))
def block_to_bx_block(self, block_msg: BlockOntMessage, tx_service: TransactionService) -> \
        Tuple[memoryview, BlockInfo]:
    """
    Compresses an Ontology block's transactions and packs it into a bloXroute block.
    """
    compress_start_datetime = datetime.utcnow()
    compress_start_timestamp = time.time()
    size = 0
    buf = deque()
    short_ids = []
    header = block_msg.txn_header()
    size += len(header)
    buf.append(header)

    for tx in block_msg.txns():
        tx_hash, _ = ont_messages_util.get_txid(tx)
        short_id = tx_service.get_short_id(tx_hash)
        if short_id == constants.NULL_TX_SID:
            buf.append(tx)
            size += len(tx)
        else:
            short_ids.append(short_id)
            buf.append(ont_constants.ONT_SHORT_ID_INDICATOR_AS_BYTEARRAY)
            size += 1

    serialized_short_ids = compact_block_short_ids_serializer.serialize_short_ids_into_bytes(short_ids)
    buf.append(serialized_short_ids)
    size += constants.UL_ULL_SIZE_IN_BYTES

    merkle_root = block_msg.merkle_root()
    buf.appendleft(merkle_root)
    size += ont_constants.ONT_HASH_LEN

    is_consensus_msg_buf = struct.pack("?", False)
    buf.appendleft(is_consensus_msg_buf)
    size += 1

    offset_buf = struct.pack("<Q", size)
    buf.appendleft(offset_buf)
    size += len(serialized_short_ids)

    block = bytearray(size)
    off = 0
    for blob in buf:
        next_off = off + len(blob)
        block[off:next_off] = blob
        off = next_off

    prev_block_hash = convert.bytes_to_hex(block_msg.prev_block_hash().binary)
    bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))
    original_size = len(block_msg.rawbytes())

    block_info = BlockInfo(
        block_msg.block_hash(),
        short_ids,
        compress_start_datetime,
        datetime.utcnow(),
        (time.time() - compress_start_timestamp) * 1000,
        block_msg.txn_count(),
        bx_block_hash,
        prev_block_hash,
        original_size,
        size,
        100 - float(size) / original_size * 100
    )
    return memoryview(block), block_info
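# A hedged sketch of reading back the prefix the converter above lays down: an 8-byte
# little-endian offset to the serialized short IDs, a 1-byte consensus flag, and the
# 32-byte merkle root, followed by the block header and the transaction/short-id stream.
# The constant value (ONT_HASH_LEN = 32) and the helper name are assumptions made purely
# for illustration; parse_bx_block_header is the actual parsing path.
import struct


def _read_bx_block_prefix(bx_block: memoryview):
    ont_hash_len = 32  # assumed value of ont_constants.ONT_HASH_LEN
    short_id_offset = struct.unpack_from("<Q", bx_block, 0)[0]
    is_consensus_msg = struct.unpack_from("?", bx_block, 8)[0]
    merkle_root = bytes(bx_block[9:9 + ont_hash_len])
    return short_id_offset, is_consensus_msg, merkle_root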
def build_block_header_message(
        self, block_hash: Sha256Hash, block_message: BlockOntMessage
) -> HeadersOntMessage:
    return HeadersOntMessage(magic=block_message.magic(), headers=[block_message.header()])
def ont_transactions(self, block: BlockOntMessage = None) -> List[memoryview]:
    txs = block.txns()
    return [TxOntMessage(self.magic, self.version, tx[1:]) for tx in txs]