def block_to_bx_block(self, block_msg: BlockOntMessage, tx_service: TransactionService) \
        -> Tuple[memoryview, BlockInfo]:
    """
    Pass-through "compression": returns the Ontology block's raw bytes unchanged.

    No transactions are replaced with short ids, so the BlockInfo reports an
    empty short-id list, identical original/compressed sizes, and a 0%
    compression rate.

    :param block_msg: Ontology block message to convert
    :param tx_service: transaction service (unused in this pass-through)
    :return: tuple of (raw block bytes, BlockInfo describing the conversion)
    """
    conversion_started_at = datetime.utcnow()
    timer_start = time.time()
    info = BlockInfo(
        block_msg.block_hash(),
        [],  # no short ids in a pass-through conversion
        conversion_started_at,
        datetime.utcnow(),
        (time.time() - timer_start) * 1000,
        block_msg.txn_count(),
        str(block_msg.block_hash()),
        convert.bytes_to_hex(block_msg.prev_block_hash().binary),
        len(block_msg.rawbytes()),
        len(block_msg.rawbytes()),
        0,  # compression rate: nothing was compressed
    )
    return block_msg.rawbytes(), info
def bx_block_to_block(self, bx_block_msg: memoryview, tx_service: TransactionService) -> BlockDecompressionResult:
    """
    Pass-through "decompression": parses the bloXroute block bytes directly as
    an Ontology block message.

    Since the paired compression is a pass-through, there are no short ids to
    resolve; the result carries empty known/unknown short-id lists and a
    BlockInfo with a 0% compression rate.

    :param bx_block_msg: raw bloXroute block bytes
    :param tx_service: transaction service (unused in this pass-through)
    :return: BlockDecompressionResult wrapping the parsed block and its info
    """
    decompress_started_at = datetime.utcnow()
    timer_start = time.time()
    ont_block = BlockOntMessage(buf=bytearray(bx_block_msg))
    info = BlockInfo(
        ont_block.block_hash(),
        [],  # no short ids in a pass-through conversion
        decompress_started_at,
        datetime.utcnow(),
        (time.time() - timer_start) * 1000,
        ont_block.txn_count(),
        str(ont_block.block_hash()),
        convert.bytes_to_hex(ont_block.prev_block_hash().binary),
        len(ont_block.rawbytes()),
        len(ont_block.rawbytes()),
        0,  # compression rate: nothing was compressed
    )
    return BlockDecompressionResult(ont_block, info, [], [])
def block_to_bx_block(self, block_msg: BlockOntMessage, tx_service: TransactionService) -> \
        Tuple[memoryview, BlockInfo]:
    """
    Compresses a Ontology block's transactions and packs it into a bloXroute block.

    Each transaction whose hash has a known short id is replaced by a 1-byte
    indicator; unknown transactions are kept verbatim. Final buffer layout
    (built via appendleft, so listed front to back):
    [short-id-offset (8B)][is-consensus flag (1B)][merkle root][txn header][txns...][serialized short ids]

    :param block_msg: Ontology block message to compress
    :param tx_service: transaction service used to look up short ids
    :return: tuple of (compressed block bytes, BlockInfo describing the compression)
    """
    compress_start_datetime = datetime.utcnow()
    compress_start_timestamp = time.time()
    size = 0
    buf = deque()
    short_ids = []
    header = block_msg.txn_header()
    size += len(header)
    buf.append(header)
    for tx in block_msg.txns():
        tx_hash, _ = ont_messages_util.get_txid(tx)
        short_id = tx_service.get_short_id(tx_hash)
        if short_id == constants.NULL_TX_SID:
            # No short id known: keep the full transaction bytes.
            buf.append(tx)
            size += len(tx)
        else:
            # Known short id: emit a 1-byte indicator in place of the
            # transaction; the id itself goes into the trailing short-id blob.
            short_ids.append(short_id)
            buf.append(ont_constants.ONT_SHORT_ID_INDICATOR_AS_BYTEARRAY)
            size += 1
    serialized_short_ids = compact_block_short_ids_serializer.serialize_short_ids_into_bytes(
        short_ids)
    buf.append(serialized_short_ids)
    # NOTE: deliberately adds the size of the 8-byte offset header (prepended
    # below), NOT len(serialized_short_ids). This makes `size` equal the byte
    # offset of the short-id blob in the final buffer by the time offset_buf
    # is packed; the blob's own length is added only after packing.
    size += constants.UL_ULL_SIZE_IN_BYTES
    merkle_root = block_msg.merkle_root()
    buf.appendleft(merkle_root)
    size += ont_constants.ONT_HASH_LEN
    # Prepend a 1-byte flag distinguishing block messages from consensus
    # messages; always False on this path.
    is_consensus_msg_buf = struct.pack("?", False)
    buf.appendleft(is_consensus_msg_buf)
    size += 1
    # At this point size == 8 (offset header) + 1 (flag) + hash + header + txns,
    # i.e. exactly where the short-id blob starts in the assembled buffer.
    offset_buf = struct.pack("<Q", size)
    buf.appendleft(offset_buf)
    # Now account for the short-id blob itself to get the true total size.
    size += len(serialized_short_ids)
    # Assemble the final contiguous buffer from the accumulated segments.
    block = bytearray(size)
    off = 0
    for blob in buf:
        next_off = off + len(blob)
        block[off:next_off] = blob
        off = next_off
    prev_block_hash = convert.bytes_to_hex(
        block_msg.prev_block_hash().binary)
    bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))
    original_size = len(block_msg.rawbytes())
    block_info = BlockInfo(block_msg.block_hash(), short_ids, compress_start_datetime, datetime.utcnow(),
                           (time.time() - compress_start_timestamp) * 1000, block_msg.txn_count(), bx_block_hash,
                           prev_block_hash, original_size, size,
                           # compression rate as a percentage of bytes saved
                           100 - float(size) / original_size * 100)
    return memoryview(block), block_info