def parse_bx_block_header(
        bx_block: memoryview, block_pieces: Deque[Union[bytearray, memoryview]]
) -> BlockHeaderInfo:
    """
    Reads the BTC block header section out of a bx block, computes the block
    hash, and appends the header bytes (through the txn-count varint) to
    ``block_pieces`` as the first piece of the reassembled block.

    :param bx_block: compressed bx block buffer
    :param block_pieces: deque collecting the raw pieces of the original block
    :return: BlockHeaderInfo with offsets, short ids, hash, offset past the
        txn count, and the transaction count
    """
    block_offsets = compact_block_short_ids_serializer.get_bx_block_offsets(bx_block)
    short_ids, short_ids_len = compact_block_short_ids_serializer.deserialize_short_ids_from_buffer(
        bx_block,
        block_offsets.short_id_offset,
    )

    # The raw BTC header starts right after the common message header prefix.
    header_begin = block_offsets.block_begin_offset + btc_constants.BTC_HDR_COMMON_OFF
    header_end = header_begin + btc_constants.BTC_BLOCK_HDR_SIZE
    block_hash = BtcObjectHash(
        buf=crypto.bitcoin_hash(bx_block[header_begin:header_end]),
        length=btc_constants.BTC_SHA_HASH_LEN,
    )

    # A varint transaction count immediately follows the fixed-size header.
    txn_count, txn_count_size = btc_common_utils.btc_varint_to_int(bx_block, header_end)
    offset = header_end + txn_count_size

    # Queue everything from the block start through the txn count as the header piece.
    block_pieces.append(bx_block[block_offsets.block_begin_offset:offset])

    return BlockHeaderInfo(block_offsets, short_ids, short_ids_len, block_hash, offset, txn_count)
def parse_bx_block_header(
        bx_block: memoryview, block_pieces: Deque[Union[bytearray, memoryview]]
) -> BlockHeaderInfo:
    """
    Walks the ONT consensus payload at the start of a bx block, extracting the
    block hash and txn count and appending the header-related byte ranges to
    ``block_pieces`` in order.

    :param bx_block: compressed bx block buffer
    :param block_pieces: deque collecting the raw pieces of the original block
    :return: BlockHeaderInfo describing what was parsed and where parsing stopped
    """
    block_offsets = compact_block_short_ids_serializer.get_bx_block_offsets(bx_block)
    short_ids, short_ids_len = compact_block_short_ids_serializer.deserialize_short_ids_from_buffer(
        bx_block,
        block_offsets.short_id_offset,
    )

    begin = block_offsets.block_begin_offset

    # One leading byte precedes the block hash at the start of the payload.
    block_hash = OntObjectHash(binary=bx_block[begin + 1:begin + ont_constants.ONT_HASH_LEN + 1])
    cursor = begin + ont_constants.ONT_HASH_LEN + 1

    def read_uint32(at: int) -> int:
        # All length/count fields below are little-endian 32-bit integers.
        value, = struct.unpack_from("<L", bx_block, at)
        return value

    txn_count = read_uint32(cursor)
    cursor += ont_constants.ONT_INT_LEN

    # Skip over the length-prefixed payload tail and owner/signature sections.
    payload_tail_len = read_uint32(cursor)
    cursor += ont_constants.ONT_INT_LEN + payload_tail_len
    owner_and_signature_len = read_uint32(cursor)
    cursor += ont_constants.ONT_INT_LEN + owner_and_signature_len

    # Consensus payload header is kept as the first piece.
    consensus_payload_header_len = read_uint32(cursor)
    cursor += ont_constants.ONT_INT_LEN
    block_pieces.append(bx_block[cursor:cursor + consensus_payload_header_len])
    cursor += consensus_payload_header_len

    # A single char field and a single int field follow; keep both verbatim.
    block_pieces.append(bx_block[cursor:cursor + ont_constants.ONT_CHAR_LEN])
    cursor += ont_constants.ONT_CHAR_LEN
    block_pieces.append(bx_block[cursor:cursor + ont_constants.ONT_INT_LEN])
    cursor += ont_constants.ONT_INT_LEN

    # Length-prefixed block-start + txn-header region is the last header piece.
    block_start_len_and_txn_header_total_len = read_uint32(cursor)
    cursor += ont_constants.ONT_INT_LEN
    block_pieces.append(bx_block[cursor:cursor + block_start_len_and_txn_header_total_len])
    cursor += block_start_len_and_txn_header_total_len

    return BlockHeaderInfo(block_offsets, short_ids, short_ids_len, block_hash, cursor, txn_count)
def test_block_to_bx_block__empty_block_success(self):
    """Compressing a block with no transactions and no uncles round-trips cleanly."""
    empty_block = Block(mock_eth_messages.get_dummy_block_header(8), [], [])
    chain_difficulty = 10
    new_block_msg = NewBlockEthProtocolMessage(None, empty_block, chain_difficulty)
    self.assertTrue(new_block_msg.rawbytes())

    internal_msg = InternalEthBlockInfo.from_new_block_msg(new_block_msg)
    compressed, info = self.eth_message_converter.block_to_bx_block(
        internal_msg, self.tx_service, True, 0
    )

    # Block-level metadata reflects the empty block.
    self.assertEqual(0, info.txn_count)
    self.assertEqual(convert.bytes_to_hex(empty_block.header.prev_hash), info.prev_block_hash)
    self.assertTrue(compressed)
    self.assertIsInstance(compressed, memoryview)

    # The compact payload must decode back to an equivalent CompactBlock.
    offsets = compact_block_short_ids_serializer.get_bx_block_offsets(compressed)
    _, short_ids_len = compact_block_short_ids_serializer.deserialize_short_ids_from_buffer(
        compressed, offsets.short_id_offset
    )
    decoded = rlp.decode(
        compressed[offsets.block_begin_offset:offsets.short_id_offset].tobytes(),
        CompactBlock
    )
    self.assertTrue(decoded)
    self.assertIsInstance(decoded, CompactBlock)
    self._assert_values_equal(decoded.header, empty_block.header)
    self.assertEqual(0, len(decoded.uncles))
    self.assertEqual(0, len(decoded.transactions))
    self.assertEqual(decoded.chain_difficulty, new_block_msg.chain_difficulty)
def _parse_block_eth(block_msg_bytes: Union[bytearray, memoryview]) -> Tuple[Sha256Hash, List[int], memoryview]:
    """
    Splits a compressed ETH bx block into its block hash, short ids, and the
    raw RLP transactions section, without copying the underlying buffer.

    :param block_msg_bytes: compressed block buffer
    :return: (block hash, short ids, transactions bytes)
    """
    if not isinstance(block_msg_bytes, memoryview):
        block_msg_bytes = memoryview(block_msg_bytes)

    offsets = compact_block_short_ids_serializer.get_bx_block_offsets(block_msg_bytes)
    short_ids, _ = compact_block_short_ids_serializer.deserialize_short_ids_from_buffer(
        block_msg_bytes,
        offsets.short_id_offset,
    )

    # Step inside the outer RLP list that wraps the block contents.
    block_bytes = block_msg_bytes[offsets.block_begin_offset:offsets.short_id_offset]
    _, _outer_len, outer_start = rlp_utils.consume_length_prefix(block_bytes, 0)
    block_itm_bytes = block_bytes[outer_start:]

    # Block hash is keccak over the full encoded header, length prefix included.
    _, hdr_len, hdr_start = rlp_utils.consume_length_prefix(block_itm_bytes, 0)
    full_hdr_bytes = block_itm_bytes[:hdr_start + hdr_len]
    block_hash = Sha256Hash(eth_common_utils.keccak_hash(full_hdr_bytes))

    # The transactions list immediately follows the header item.
    _, txs_len, txs_start = rlp_utils.consume_length_prefix(block_itm_bytes, hdr_start + hdr_len)
    txs_bytes = block_itm_bytes[txs_start:txs_start + txs_len]

    return block_hash, short_ids, txs_bytes
def parse_bx_block_header(
        bx_block: memoryview, block_pieces: Deque[Union[bytearray, memoryview]]
) -> BlockHeaderInfo:
    """
    Reconstructs an ONT block message from the bx block payload to recover the
    block hash and txn count, then queues the header bytes as the first piece.

    :param bx_block: compressed bx block buffer
    :param block_pieces: deque collecting the raw pieces of the original block
    :return: BlockHeaderInfo with offsets, short ids, hash, offset of the
        first transaction, and the transaction count
    """
    block_offsets = compact_block_short_ids_serializer.get_bx_block_offsets(bx_block)
    short_ids, short_ids_len = compact_block_short_ids_serializer.deserialize_short_ids_from_buffer(
        bx_block,
        block_offsets.short_id_offset,
    )

    # The ONT message body starts after one leading byte plus the block hash.
    payload_start = block_offsets.block_begin_offset + ont_constants.ONT_HASH_LEN + 1
    reconstructed_block_message = BlockOntMessage(buf=bx_block[payload_start:])
    block_hash = reconstructed_block_message.block_hash()
    txn_count = reconstructed_block_message.txn_count()
    offset = reconstructed_block_message.txn_offset() + payload_start

    # Header piece: everything from the payload start up to the first transaction.
    block_pieces.append(bx_block[payload_start:offset])

    return BlockHeaderInfo(block_offsets, short_ids, short_ids_len, block_hash, offset, txn_count)
def _get_compressed_block_header_bytes(
        self, compressed_block_bytes: Union[bytearray, memoryview]
) -> Union[bytearray, memoryview]:
    """
    Extracts the RLP-encoded block header (length prefix included) from a
    compressed block buffer without copying.

    :param compressed_block_bytes: compressed block buffer
    :return: memoryview over the full encoded header item
    """
    if isinstance(compressed_block_bytes, memoryview):
        view = compressed_block_bytes
    else:
        view = memoryview(compressed_block_bytes)

    offsets = compact_block_short_ids_serializer.get_bx_block_offsets(view)
    block_bytes = view[offsets.block_begin_offset:offsets.short_id_offset]

    # Step inside the outer RLP list, then take the header item with its prefix.
    _, _outer_len, outer_start = rlp_utils.consume_length_prefix(block_bytes, 0)
    inner = block_bytes[outer_start:]
    _, hdr_len, hdr_start = rlp_utils.consume_length_prefix(inner, 0)
    return inner[:hdr_start + hdr_len]
def bx_block_to_block(self, bx_block_msg, tx_service) -> BlockDecompressionResult:
    """
    Converts internal broadcast message to Ethereum new block message

    The code is optimized and does not make copies of bytes

    :param bx_block_msg: internal broadcast message bytes
    :param tx_service: Transactions service
    :return: tuple (new block message, block hash, unknown transaction short id,
        unknown transaction hashes)
    """
    if not isinstance(bx_block_msg, (bytearray, memoryview)):
        raise TypeError(
            "Type bytearray is expected for arg block_bytes but was {0}".
            format(type(bx_block_msg)))

    decompress_start_datetime = datetime.datetime.utcnow()
    decompress_start_timestamp = time.time()

    # Work on a memoryview so all slicing below is zero-copy.
    block_msg_bytes = bx_block_msg if isinstance(
        bx_block_msg, memoryview) else memoryview(bx_block_msg)

    block_offsets = compact_block_short_ids_serializer.get_bx_block_offsets(
        bx_block_msg)
    short_ids, short_ids_bytes_len = compact_block_short_ids_serializer.deserialize_short_ids_from_buffer(
        bx_block_msg, block_offsets.short_id_offset)

    # Step inside the outer RLP list wrapping the compressed block contents.
    block_bytes = block_msg_bytes[
        block_offsets.block_begin_offset:block_offsets.short_id_offset]
    _, block_itm_len, block_itm_start = rlp_utils.consume_length_prefix(
        block_bytes, 0)
    block_itm_bytes = block_bytes[block_itm_start:]

    # Block hash = keccak over the full encoded header (prefix included).
    _, block_hdr_len, block_hdr_start = rlp_utils.consume_length_prefix(
        block_itm_bytes, 0)
    full_hdr_bytes = block_itm_bytes[0:block_hdr_start + block_hdr_len]
    block_hash_bytes = eth_common_utils.keccak_hash(full_hdr_bytes)
    block_hash = Sha256Hash(block_hash_bytes)

    # Transactions list follows the header; anything after it is kept verbatim.
    _, block_txs_len, block_txs_start = rlp_utils.consume_length_prefix(
        block_itm_bytes, block_hdr_start + block_hdr_len)
    txs_bytes = block_itm_bytes[block_txs_start:block_txs_start + block_txs_len]
    remaining_bytes = block_itm_bytes[block_txs_start + block_txs_len:]

    # parse statistics variables
    short_tx_index = 0
    unknown_tx_sids = []
    unknown_tx_hashes = []

    # creating transactions content
    content_size = 0
    buf = deque()
    tx_count = 0
    tx_start_index = 0

    # Each item is either a full transaction payload or a short id placeholder
    # that must be resolved through the transaction service.
    while True:
        if tx_start_index >= len(txs_bytes):
            break
        _, tx_itm_len, tx_itm_start = rlp_utils.consume_length_prefix(
            txs_bytes, tx_start_index)
        tx_bytes = txs_bytes[tx_itm_start:tx_itm_start + tx_itm_len]

        # Leading int flags whether the item carries the full transaction bytes.
        is_full_tx_start = 0
        is_full_tx, is_full_tx_len, = rlp_utils.decode_int(
            tx_bytes, is_full_tx_start)

        _, tx_content_len, tx_content_start = rlp_utils.consume_length_prefix(
            tx_bytes, is_full_tx_start + is_full_tx_len)
        tx_content_bytes = tx_bytes[tx_content_start:tx_content_start + tx_content_len]

        if is_full_tx:
            tx_bytes = tx_content_bytes
        else:
            # Resolve the short id; record what is missing for block recovery.
            short_id = short_ids[short_tx_index]
            tx_hash, tx_bytes, _ = tx_service.get_transaction(short_id)
            if tx_hash is None:
                unknown_tx_sids.append(short_id)
            elif tx_bytes is None:
                unknown_tx_hashes.append(tx_hash)
            short_tx_index += 1

        # Stop accumulating output once anything is unknown; counting continues.
        if tx_bytes is not None and not unknown_tx_sids and not unknown_tx_hashes:
            buf.append(tx_bytes)
            content_size += len(tx_bytes)
        tx_count += 1
        tx_start_index = tx_itm_start + tx_itm_len

    if not unknown_tx_sids and not unknown_tx_hashes:
        # Reassemble: [outer prefix][header][txs prefix][txs...][remaining].
        txs_prefix = rlp_utils.get_length_prefix_list(content_size)
        buf.appendleft(txs_prefix)
        content_size += len(txs_prefix)

        buf.appendleft(full_hdr_bytes)
        content_size += len(full_hdr_bytes)

        buf.append(remaining_bytes)
        content_size += len(remaining_bytes)

        msg_len_prefix = rlp_utils.get_length_prefix_list(content_size)
        buf.appendleft(msg_len_prefix)

        # Single allocation, then splice every piece into place.
        block_msg_bytes = bytearray(content_size)
        off = 0
        for blob in buf:
            next_off = off + len(blob)
            block_msg_bytes[off:next_off] = blob
            off = next_off

        block_msg = InternalEthBlockInfo(block_msg_bytes)
        logger.debug(
            "Successfully parsed block broadcast message. {} "
            "transactions in block {}", tx_count, block_hash)

        bx_block_hash = convert.bytes_to_hex(
            crypto.double_sha256(bx_block_msg))
        compressed_size = len(bx_block_msg)

        block_info = BlockInfo(
            block_hash, short_ids, decompress_start_datetime,
            datetime.datetime.utcnow(),
            (time.time() - decompress_start_timestamp) * 1000, tx_count,
            bx_block_hash,
            convert.bytes_to_hex(block_msg.prev_block_hash().binary),
            len(block_msg.rawbytes()), compressed_size,
            100 - float(compressed_size) / content_size * 100, [])
        return BlockDecompressionResult(block_msg, block_info,
                                        unknown_tx_sids, unknown_tx_hashes)
    else:
        # Missing transactions: report a partial result so recovery can run.
        logger.debug(
            "Block recovery needed for {}. Missing {} sids, {} tx hashes. "
            "Total txs in block: {}", block_hash, len(unknown_tx_sids),
            len(unknown_tx_hashes), tx_count)
        return BlockDecompressionResult(
            None,
            BlockInfo(block_hash, short_ids, decompress_start_datetime,
                      datetime.datetime.utcnow(),
                      (time.time() - decompress_start_timestamp) * 1000, None,
                      None, None, None, None, None, []), unknown_tx_sids,
            unknown_tx_hashes)
def test_block_to_bx_block__no_compressed_block(self):
    # With compression disabled, every transaction stays full and no short
    # ids are consumed; the converted block must round-trip back.
    txs = []
    txs_bytes = []
    txs_hashes = []
    short_ids = []
    used_short_ids = []
    tx_count = 150

    # Build tx_count - 1 dummy transactions and their encoded forms.
    for i in range(1, tx_count):
        tx = mock_eth_messages.get_dummy_transaction(1)
        txs.append(tx)

        tx_bytes = rlp.encode(tx, Transaction)
        txs_bytes.append(tx_bytes)

        tx_hash = tx.hash()
        txs_hashes.append(tx_hash)
        short_ids.append(0)

    block = Block(
        mock_eth_messages.get_dummy_block_header(1),
        txs,
        [
            mock_eth_messages.get_dummy_block_header(2),
            mock_eth_messages.get_dummy_block_header(3),
        ]
    )
    dummy_chain_difficulty = 10
    block_msg = NewBlockEthProtocolMessage(None, block, dummy_chain_difficulty)
    self.assertTrue(block_msg.rawbytes())

    internal_new_block_msg = InternalEthBlockInfo.from_new_block_msg(block_msg)
    # False => do not compress transactions into short ids.
    bx_block_msg, block_info = self.eth_message_converter.block_to_bx_block(
        internal_new_block_msg, self.tx_service, False, 0
    )

    # Uncompressed output cannot be smaller than the input message.
    self.assertTrue(len(bx_block_msg) >= len(internal_new_block_msg.rawbytes()))
    self.assertEqual(len(txs), block_info.txn_count)
    self.assertEqual(convert.bytes_to_hex(block.header.prev_hash), block_info.prev_block_hash)
    # No short ids should have been used.
    self.assertEqual(used_short_ids, list(block_info.short_ids))

    self.assertTrue(bx_block_msg)
    self.assertIsInstance(bx_block_msg, memoryview)

    # Decode the compact payload and compare it field-by-field to the source block.
    block_offsets = compact_block_short_ids_serializer.get_bx_block_offsets(bx_block_msg)
    compact_block = rlp.decode(
        bx_block_msg[block_offsets.block_begin_offset: block_offsets.short_id_offset].tobytes(),
        CompactBlock
    )
    self.assertTrue(compact_block)
    self.assertIsInstance(compact_block, CompactBlock)
    self._assert_values_equal(compact_block.header, block.header)
    self._assert_values_equal(compact_block.uncles, block.uncles)
    self.assertEqual(len(compact_block.transactions), len(block.transactions))

    # Every transaction must be carried in full, byte-for-byte.
    for tx, short_tx, i in zip(block.transactions, compact_block.transactions, range(1, tx_count)):
        self.assertIsInstance(tx, Transaction)
        self.assertIsInstance(short_tx, ShortTransaction)
        self.assertEqual(1, short_tx.full_transaction)
        self.assertEqual(short_tx.transaction_bytes, txs_bytes[i - 1])

    self.assertEqual(compact_block.chain_difficulty, block_msg.chain_difficulty)

    # Round-trip: the uncompressed bx block converts back to a block message.
    converted_block_msg, _, _, _ = self.eth_message_converter.bx_block_to_block(bx_block_msg, self.tx_service)
    self.assertIsNotNone(converted_block_msg)