def block_to_bx_block(
        self, block_msg: InternalEthBlockInfo, tx_service: TransactionService
) -> Tuple[memoryview, BlockInfo]:
    """
    Convert Ethereum new block message to internal broadcast message with transactions replaced with short ids

    The code is optimized and does not make copies of bytes

    :param block_msg: Ethereum new block message
    :param tx_service: Transactions service
    :return: tuple (compressed block bytes, block info)
    """
    compress_start_datetime = datetime.datetime.utcnow()
    compress_start_timestamp = time.time()
    msg_bytes = memoryview(block_msg.rawbytes())

    _, block_msg_itm_len, block_msg_itm_start = rlp_utils.consume_length_prefix(msg_bytes, 0)
    block_msg_bytes = msg_bytes[block_msg_itm_start:block_msg_itm_start + block_msg_itm_len]

    _, block_hdr_itm_len, block_hdr_itm_start = rlp_utils.consume_length_prefix(block_msg_bytes, 0)
    block_hdr_full_bytes = block_msg_bytes[0:block_hdr_itm_start + block_hdr_itm_len]
    block_hdr_bytes = block_msg_bytes[block_hdr_itm_start:block_hdr_itm_start + block_hdr_itm_len]

    _, prev_block_itm_len, prev_block_itm_start = rlp_utils.consume_length_prefix(block_hdr_bytes, 0)
    prev_block_bytes = block_hdr_bytes[prev_block_itm_start:prev_block_itm_start + prev_block_itm_len]

    _, txs_itm_len, txs_itm_start = rlp_utils.consume_length_prefix(
        block_msg_bytes, block_hdr_itm_start + block_hdr_itm_len
    )
    txs_bytes = block_msg_bytes[txs_itm_start:txs_itm_start + txs_itm_len]
    remaining_bytes = block_msg_bytes[txs_itm_start + txs_itm_len:]

    used_short_ids = []

    # creating transactions content
    content_size = 0
    buf = deque()
    tx_start_index = 0
    tx_count = 0

    while True:
        if tx_start_index >= len(txs_bytes):
            break

        _, tx_item_length, tx_item_start = rlp_utils.consume_length_prefix(txs_bytes, tx_start_index)
        tx_bytes = txs_bytes[tx_start_index:tx_item_start + tx_item_length]
        tx_hash_bytes = eth_common_utils.keccak_hash(tx_bytes)
        tx_hash = Sha256Hash(tx_hash_bytes)
        short_id = tx_service.get_short_id(tx_hash)

        if short_id <= 0:
            is_full_tx_bytes = rlp_utils.encode_int(1)
            tx_content_bytes = tx_bytes
        else:
            is_full_tx_bytes = rlp_utils.encode_int(0)
            used_short_ids.append(short_id)
            tx_content_bytes = bytes()

        tx_content_prefix = rlp_utils.get_length_prefix_str(len(tx_content_bytes))

        short_tx_content_size = len(is_full_tx_bytes) + len(tx_content_prefix) + len(tx_content_bytes)

        short_tx_content_prefix_bytes = rlp_utils.get_length_prefix_list(short_tx_content_size)

        buf.append(short_tx_content_prefix_bytes)
        buf.append(is_full_tx_bytes)
        buf.append(tx_content_prefix)
        buf.append(tx_content_bytes)

        content_size += len(short_tx_content_prefix_bytes) + short_tx_content_size

        tx_start_index = tx_item_start + tx_item_length

        tx_count += 1

    list_of_txs_prefix_bytes = rlp_utils.get_length_prefix_list(content_size)
    buf.appendleft(list_of_txs_prefix_bytes)
    content_size += len(list_of_txs_prefix_bytes)

    buf.appendleft(block_hdr_full_bytes)
    content_size += len(block_hdr_full_bytes)

    buf.append(remaining_bytes)
    content_size += len(remaining_bytes)

    compact_block_msg_prefix = rlp_utils.get_length_prefix_list(content_size)
    buf.appendleft(compact_block_msg_prefix)
    content_size += len(compact_block_msg_prefix)

    short_ids_bytes = compact_block_short_ids_serializer.serialize_short_ids_into_bytes(used_short_ids)
    buf.append(short_ids_bytes)
    content_size += constants.UL_ULL_SIZE_IN_BYTES
    offset_buf = struct.pack("<Q", content_size)
    buf.appendleft(offset_buf)
    content_size += len(short_ids_bytes)

    # Parse it into the bloXroute message format and send it along
    block = bytearray(content_size)
    off = 0
    for blob in buf:
        next_off = off + len(blob)
        block[off:next_off] = blob
        off = next_off

    bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))
    original_size = len(block_msg.rawbytes())

    block_info = BlockInfo(
        block_msg.block_hash(),
        used_short_ids,
        compress_start_datetime,
        datetime.datetime.utcnow(),
        (time.time() - compress_start_timestamp) * 1000,
        tx_count,
        bx_block_hash,
        convert.bytes_to_hex(prev_block_bytes),
        original_size,
        content_size,
        100 - float(content_size) / original_size * 100
    )
    return memoryview(block), block_info
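
# A minimal decoder-side sketch (not part of the codec above) showing how the
# 8-byte offset header written by block_to_bx_block can be used to split a
# bx_block back into its compact payload and its serialized short ids. It is
# an illustration only and assumes constants.UL_ULL_SIZE_IN_BYTES is 8, i.e.
# the layout is:
# [8-byte little-endian offset][compact block payload][serialized short ids].
def _split_bx_block_sketch(bx_block: memoryview) -> Tuple[memoryview, memoryview]:
    # the packed offset points at the first byte of the serialized short ids
    short_ids_offset, = struct.unpack_from("<Q", bx_block, 0)
    compact_block = bx_block[8:short_ids_offset]
    short_ids_bytes = bx_block[short_ids_offset:]
    return compact_block, short_ids_bytes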
def block_to_bx_block(
        self,
        block_msg: OntConsensusMessage,
        tx_service: TransactionService,
        enable_block_compression: bool,
        min_tx_age_seconds: float
) -> Tuple[memoryview, BlockInfo]:
    """
    Pack an Ontology consensus message's transactions into a bloXroute block.
    """
    consensus_msg = block_msg
    compress_start_datetime = datetime.utcnow()
    compress_start_timestamp = time.time()
    size = 0
    buf = deque()
    short_ids = []
    ignored_sids = []
    original_size = len(consensus_msg.rawbytes())

    consensus_payload_header = consensus_msg.consensus_payload_header()
    consensus_payload_header_len = bytearray(ont_constants.ONT_INT_LEN)
    struct.pack_into("<L", consensus_payload_header_len, 0, len(consensus_payload_header))
    size += ont_constants.ONT_INT_LEN
    buf.append(consensus_payload_header_len)
    size += len(consensus_payload_header)
    buf.append(consensus_payload_header)

    consensus_data_type = bytearray(ont_constants.ONT_CHAR_LEN)
    struct.pack_into("<B", consensus_data_type, 0, consensus_msg.consensus_data_type())
    size += ont_constants.ONT_CHAR_LEN
    buf.append(consensus_data_type)

    consensus_data_len = bytearray(ont_constants.ONT_INT_LEN)
    struct.pack_into("<L", consensus_data_len, 0, consensus_msg.consensus_data_len())
    size += ont_constants.ONT_INT_LEN
    buf.append(consensus_data_len)

    block_start_len = consensus_msg.block_start_len_memoryview()
    txn_header = consensus_msg.txn_header()
    block_start_len_and_txn_header_total_len = bytearray(ont_constants.ONT_INT_LEN)
    struct.pack_into("<L", block_start_len_and_txn_header_total_len, 0, len(block_start_len) + len(txn_header))
    size += ont_constants.ONT_INT_LEN
    buf.append(block_start_len_and_txn_header_total_len)
    size += len(block_start_len)
    buf.append(block_start_len)
    size += len(txn_header)
    buf.append(txn_header)

    max_timestamp_for_compression = time.time() - min_tx_age_seconds

    for tx in consensus_msg.txns():
        tx_hash, _ = ont_messages_util.get_txid(tx)
        short_id = tx_service.get_short_id(tx_hash)
        short_id_assign_time = 0
        if short_id != constants.NULL_TX_SID:
            short_id_assign_time = tx_service.get_short_id_assign_time(short_id)

        if short_id == constants.NULL_TX_SID or \
                not enable_block_compression or \
                short_id_assign_time > max_timestamp_for_compression:
            if short_id != constants.NULL_TX_SID:
                ignored_sids.append(short_id)
            buf.append(tx)
            size += len(tx)
        else:
            short_ids.append(short_id)
            buf.append(ont_constants.ONT_SHORT_ID_INDICATOR_AS_BYTEARRAY)
            size += 1

    # Prepend owner and signature, consensus payload tail, tx count and block hash to bx_block
    owner_and_signature = consensus_msg.owner_and_signature()
    owner_and_signature_len = bytearray(ont_constants.ONT_INT_LEN)
    struct.pack_into("<L", owner_and_signature_len, 0, len(owner_and_signature))
    size += len(owner_and_signature)
    buf.appendleft(owner_and_signature)
    size += ont_constants.ONT_INT_LEN
    buf.appendleft(owner_and_signature_len)

    payload_tail = consensus_msg.payload_tail()
    payload_tail_len = bytearray(ont_constants.ONT_INT_LEN)
    struct.pack_into("<L", payload_tail_len, 0, len(payload_tail))
    size += len(payload_tail)
    buf.appendleft(payload_tail)
    size += ont_constants.ONT_INT_LEN
    buf.appendleft(payload_tail_len)

    txn_count = bytearray(ont_constants.ONT_INT_LEN)
    struct.pack_into("<L", txn_count, 0, consensus_msg.txn_count())
    size += ont_constants.ONT_INT_LEN
    buf.appendleft(txn_count)

    block_hash = consensus_msg.block_hash().binary
    size += ont_constants.ONT_HASH_LEN
    buf.appendleft(block_hash)

    is_consensus_msg_buf = struct.pack("?", True)
    buf.appendleft(is_consensus_msg_buf)
    size += 1

    block = finalize_block_bytes(buf, size, short_ids)

    prev_block_hash = convert.bytes_to_hex(consensus_msg.prev_block_hash().binary)
    bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))

    block_info = BlockInfo(
        consensus_msg.block_hash(),
        short_ids,
        compress_start_datetime,
        datetime.utcnow(),
        (time.time() - compress_start_timestamp) * 1000,
        consensus_msg.txn_count(),
        bx_block_hash,
        prev_block_hash,
        original_size,
        size,
        100 - float(size) / original_size * 100,
        ignored_sids
    )
    return memoryview(block), block_info
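
# finalize_block_bytes is called above but not defined in this section. The
# sketch below is an assumption about what it does, reconstructed from the
# inline finalization steps of the other codecs here (serialize the short ids,
# prepend the 8-byte offset header, then flatten the buffered segments). It is
# illustrative only and is not the actual implementation; it reuses names
# already imported by this module (struct, constants,
# compact_block_short_ids_serializer).
def _finalize_block_bytes_sketch(buf, size, short_ids):
    # buf: deque of bytes-like segments, size: total payload size so far,
    # short_ids: list of ints collected during compression
    serialized_short_ids = compact_block_short_ids_serializer.serialize_short_ids_into_bytes(short_ids)
    buf.append(serialized_short_ids)
    size += constants.UL_ULL_SIZE_IN_BYTES     # account for the offset header itself
    buf.appendleft(struct.pack("<Q", size))    # offset of the short-id section
    size += len(serialized_short_ids)

    # flatten all buffered segments into one contiguous bytearray
    block = bytearray(size)
    off = 0
    for blob in buf:
        next_off = off + len(blob)
        block[off:next_off] = blob
        off = next_off
    return block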
def block_to_bx_block(
        self,
        block_msg: InternalEthBlockInfo,
        tx_service: TransactionService,
        enable_block_compression: bool,
        min_tx_age_seconds: float
) -> Tuple[memoryview, BlockInfo]:
    """
    Convert Ethereum new block message to internal broadcast message with transactions replaced with short ids

    The code is optimized and does not make copies of bytes

    :param block_msg: Ethereum new block message
    :param tx_service: Transactions service
    :param enable_block_compression: if True, replace eligible transactions with their short ids
    :param min_tx_age_seconds: minimum age of a short id assignment before it is used for compression
    :return: tuple (compressed block bytes, block info)
    """
    compress_start_datetime = datetime.datetime.utcnow()
    compress_start_timestamp = time.time()

    txs_bytes, block_hdr_full_bytes, remaining_bytes, prev_block_bytes = parse_block_message(block_msg)

    used_short_ids = []

    # creating transactions content
    content_size = 0
    buf = deque()
    ignored_sids = []
    tx_start_index = 0
    tx_count = 0
    original_size = len(block_msg.rawbytes())
    max_timestamp_for_compression = time.time() - min_tx_age_seconds

    while True:
        if tx_start_index >= len(txs_bytes):
            break

        _, tx_item_length, tx_item_start = rlp_utils.consume_length_prefix(txs_bytes, tx_start_index)
        tx_bytes = txs_bytes[tx_start_index:tx_item_start + tx_item_length]
        tx_hash_bytes = eth_common_utils.keccak_hash(tx_bytes)
        tx_hash = Sha256Hash(tx_hash_bytes)
        short_id = tx_service.get_short_id(tx_hash)
        short_id_assign_time = 0

        if short_id != constants.NULL_TX_SID:
            short_id_assign_time = tx_service.get_short_id_assign_time(short_id)

        if short_id <= constants.NULL_TX_SID or \
                not enable_block_compression or \
                short_id_assign_time > max_timestamp_for_compression:
            if short_id > constants.NULL_TX_SID:
                ignored_sids.append(short_id)
            is_full_tx_bytes = rlp_utils.encode_int(1)
            tx_content_bytes = tx_bytes
        else:
            is_full_tx_bytes = rlp_utils.encode_int(0)
            used_short_ids.append(short_id)
            tx_content_bytes = bytes()

        tx_content_prefix = rlp_utils.get_length_prefix_str(len(tx_content_bytes))

        short_tx_content_size = len(is_full_tx_bytes) + len(tx_content_prefix) + len(tx_content_bytes)

        short_tx_content_prefix_bytes = rlp_utils.get_length_prefix_list(short_tx_content_size)

        buf.append(short_tx_content_prefix_bytes)
        buf.append(is_full_tx_bytes)
        buf.append(tx_content_prefix)
        buf.append(tx_content_bytes)

        content_size += len(short_tx_content_prefix_bytes) + short_tx_content_size

        tx_start_index = tx_item_start + tx_item_length

        tx_count += 1

    list_of_txs_prefix_bytes = rlp_utils.get_length_prefix_list(content_size)
    buf.appendleft(list_of_txs_prefix_bytes)
    content_size += len(list_of_txs_prefix_bytes)

    buf.appendleft(block_hdr_full_bytes)
    content_size += len(block_hdr_full_bytes)

    buf.append(remaining_bytes)
    content_size += len(remaining_bytes)

    compact_block_msg_prefix = rlp_utils.get_length_prefix_list(content_size)
    buf.appendleft(compact_block_msg_prefix)
    content_size += len(compact_block_msg_prefix)

    block = finalize_block_bytes(buf, content_size, used_short_ids)

    bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))

    block_info = BlockInfo(
        block_msg.block_hash(),
        used_short_ids,
        compress_start_datetime,
        datetime.datetime.utcnow(),
        (time.time() - compress_start_timestamp) * 1000,
        tx_count,
        bx_block_hash,
        convert.bytes_to_hex(prev_block_bytes),
        original_size,
        content_size,
        100 - float(content_size) / original_size * 100,
        ignored_sids
    )
    return memoryview(block), block_info
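
# The short-id gating used by the compression-aware codecs above, restated as
# a small standalone predicate for clarity. This helper is hypothetical (not
# part of the codecs); it only restates the condition: a transaction is
# replaced by its short id when a short id exists, block compression is
# enabled, and the short id was assigned at least min_tx_age_seconds ago
# (i.e. no later than max_timestamp_for_compression).
def _should_use_short_id_sketch(
        short_id, short_id_assign_time, enable_block_compression, max_timestamp_for_compression
) -> bool:
    if short_id <= constants.NULL_TX_SID or not enable_block_compression:
        return False
    return short_id_assign_time <= max_timestamp_for_compression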
def block_to_bx_block(
        self, block_msg: BlockOntMessage, tx_service: TransactionService
) -> Tuple[memoryview, BlockInfo]:
    """
    Compresses an Ontology block's transactions and packs it into a bloXroute block.
    """
    compress_start_datetime = datetime.utcnow()
    compress_start_timestamp = time.time()
    size = 0
    buf = deque()
    short_ids = []
    header = block_msg.txn_header()
    size += len(header)
    buf.append(header)

    for tx in block_msg.txns():
        tx_hash, _ = ont_messages_util.get_txid(tx)
        short_id = tx_service.get_short_id(tx_hash)
        if short_id == constants.NULL_TX_SID:
            buf.append(tx)
            size += len(tx)
        else:
            short_ids.append(short_id)
            buf.append(ont_constants.ONT_SHORT_ID_INDICATOR_AS_BYTEARRAY)
            size += 1

    serialized_short_ids = compact_block_short_ids_serializer.serialize_short_ids_into_bytes(short_ids)
    buf.append(serialized_short_ids)
    size += constants.UL_ULL_SIZE_IN_BYTES

    merkle_root = block_msg.merkle_root()
    buf.appendleft(merkle_root)
    size += ont_constants.ONT_HASH_LEN

    is_consensus_msg_buf = struct.pack("?", False)
    buf.appendleft(is_consensus_msg_buf)
    size += 1

    offset_buf = struct.pack("<Q", size)
    buf.appendleft(offset_buf)
    size += len(serialized_short_ids)

    block = bytearray(size)
    off = 0
    for blob in buf:
        next_off = off + len(blob)
        block[off:next_off] = blob
        off = next_off

    prev_block_hash = convert.bytes_to_hex(block_msg.prev_block_hash().binary)
    bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))
    original_size = len(block_msg.rawbytes())

    block_info = BlockInfo(
        block_msg.block_hash(),
        short_ids,
        compress_start_datetime,
        datetime.utcnow(),
        (time.time() - compress_start_timestamp) * 1000,
        block_msg.txn_count(),
        bx_block_hash,
        prev_block_hash,
        original_size,
        size,
        100 - float(size) / original_size * 100
    )
    return memoryview(block), block_info
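
# Both the Ontology block codec above and the consensus codecs prepend a
# one-byte boolean flag (struct.pack("?", ...)) immediately after the
# short-id offset header: False for regular blocks, True for consensus
# messages. A hypothetical receiver-side check, assuming that layout and an
# 8-byte offset header (constants.UL_ULL_SIZE_IN_BYTES == 8), could dispatch
# on it like this:
def _is_consensus_bx_block_sketch(bx_block: memoryview) -> bool:
    return struct.unpack_from("?", bx_block, 8)[0]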
def block_to_bx_block(
        self, block_msg: OntConsensusMessage, tx_service: TransactionService
) -> Tuple[memoryview, BlockInfo]:
    """
    Compresses an Ontology consensus message's transactions and packs it into a bloXroute block.
    """
    consensus_msg = block_msg
    compress_start_datetime = datetime.utcnow()
    compress_start_timestamp = time.time()
    size = 0
    buf = deque()
    short_ids = []

    consensus_payload_header = consensus_msg.consensus_payload_header()
    consensus_payload_header_len = bytearray(ont_constants.ONT_INT_LEN)
    struct.pack_into("<L", consensus_payload_header_len, 0, len(consensus_payload_header))
    size += ont_constants.ONT_INT_LEN
    buf.append(consensus_payload_header_len)
    size += len(consensus_payload_header)
    buf.append(consensus_payload_header)

    consensus_data_type = bytearray(ont_constants.ONT_CHAR_LEN)
    struct.pack_into("<B", consensus_data_type, 0, consensus_msg.consensus_data_type())
    size += ont_constants.ONT_CHAR_LEN
    buf.append(consensus_data_type)

    consensus_data_len = bytearray(ont_constants.ONT_INT_LEN)
    struct.pack_into("<L", consensus_data_len, 0, consensus_msg.consensus_data_len())
    size += ont_constants.ONT_INT_LEN
    buf.append(consensus_data_len)

    block_start_len = consensus_msg.block_start_len_memoryview()
    txn_header = consensus_msg.txn_header()
    block_start_len_and_txn_header_total_len = bytearray(ont_constants.ONT_INT_LEN)
    struct.pack_into("<L", block_start_len_and_txn_header_total_len, 0, len(block_start_len) + len(txn_header))
    size += ont_constants.ONT_INT_LEN
    buf.append(block_start_len_and_txn_header_total_len)
    size += len(block_start_len)
    buf.append(block_start_len)
    size += len(txn_header)
    buf.append(txn_header)

    for tx in consensus_msg.txns():
        tx_hash, _ = ont_messages_util.get_txid(tx)
        short_id = tx_service.get_short_id(tx_hash)
        if short_id == constants.NULL_TX_SID:
            buf.append(tx)
            size += len(tx)
        else:
            short_ids.append(short_id)
            buf.append(ont_constants.ONT_SHORT_ID_INDICATOR_AS_BYTEARRAY)
            size += 1

    # Prepend owner and signature, consensus payload tail, tx count and block hash to bx_block
    owner_and_signature = consensus_msg.owner_and_signature()
    owner_and_signature_len = bytearray(ont_constants.ONT_INT_LEN)
    struct.pack_into("<L", owner_and_signature_len, 0, len(owner_and_signature))
    size += len(owner_and_signature)
    buf.appendleft(owner_and_signature)
    size += ont_constants.ONT_INT_LEN
    buf.appendleft(owner_and_signature_len)

    payload_tail = consensus_msg.payload_tail()
    payload_tail_len = bytearray(ont_constants.ONT_INT_LEN)
    struct.pack_into("<L", payload_tail_len, 0, len(payload_tail))
    size += len(payload_tail)
    buf.appendleft(payload_tail)
    size += ont_constants.ONT_INT_LEN
    buf.appendleft(payload_tail_len)

    txn_count = bytearray(ont_constants.ONT_INT_LEN)
    struct.pack_into("<L", txn_count, 0, consensus_msg.txn_count())
    size += ont_constants.ONT_INT_LEN
    buf.appendleft(txn_count)

    block_hash = consensus_msg.block_hash().binary
    size += ont_constants.ONT_HASH_LEN
    buf.appendleft(block_hash)

    is_consensus_msg_buf = struct.pack("?", True)
    buf.appendleft(is_consensus_msg_buf)
    size += 1

    serialized_short_ids = compact_block_short_ids_serializer.serialize_short_ids_into_bytes(short_ids)
    buf.append(serialized_short_ids)
    size += constants.UL_ULL_SIZE_IN_BYTES

    offset_buf = struct.pack("<Q", size)
    buf.appendleft(offset_buf)
    size += len(serialized_short_ids)

    block = bytearray(size)
    off = 0
    for blob in buf:
        next_off = off + len(blob)
        block[off:next_off] = blob
        off = next_off

    prev_block_hash = convert.bytes_to_hex(consensus_msg.prev_block_hash().binary)
    bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))
    original_size = len(consensus_msg.rawbytes())

    block_info = BlockInfo(
        consensus_msg.block_hash(),
        short_ids,
        compress_start_datetime,
        datetime.utcnow(),
        (time.time() - compress_start_timestamp) * 1000,
        consensus_msg.txn_count(),
        bx_block_hash,
        prev_block_hash,
        original_size,
        size,
        100 - float(size) / original_size * 100
    )
    return memoryview(block), block_info
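
# Every BlockInfo built in this section reports the same compression metric:
# the percentage of the original block size saved by the bx_block encoding.
# A hypothetical helper (not used by the codecs) making the formula explicit:
def _compression_rate_sketch(original_size: int, compressed_size: int) -> float:
    return 100 - float(compressed_size) / original_size * 100

# e.g. _compression_rate_sketch(1000, 400) == 60.0, i.e. the bx_block is 60% smaller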