def clean_block_transactions(self, block_msg: BlockOntMessage, transaction_service: TransactionService) -> None:
    """
    Remove every transaction of a cleaned-up block from the transaction
    service, then log per-block cleanup statistics and kick off the node's
    post-cleanup tasks.

    :param block_msg: Ontology block whose transactions are being removed
    :param transaction_service: service tracking tx contents and short ids
    """
    cleanup_started = time.time()

    # Snapshot service-wide counters so the before/after delta can be reported.
    contents_len_before = transaction_service.get_tx_hash_to_contents_len()
    short_id_count_before = transaction_service.get_short_id_count()

    removed_short_ids = []
    unknown_hashes = []
    processed = 0

    for raw_tx in block_msg.txns():
        tx_hash = OntObjectHash(buf=crypto.double_sha256(raw_tx), length=ont_constants.ONT_HASH_LEN)
        removal_result = transaction_service.remove_transaction_by_tx_hash(tx_hash, force=True)
        if removal_result is None:
            # The service had no record of this hash.
            unknown_hashes.append(tx_hash)
        else:
            removed_short_ids.extend(removal_result)
        processed += 1

    block_hash = block_msg.block_hash()
    transaction_service.on_block_cleaned_up(block_hash)
    elapsed = time.time() - cleanup_started

    contents_len_after = transaction_service.get_tx_hash_to_contents_len()
    short_id_count_after = transaction_service.get_short_id_count()

    logger.debug(
        "Finished cleaning up block {}. Processed {} hashes, {} of which were unknown, and cleaned up {} "
        "short ids. Took {:.3f}s.",
        block_hash, processed, len(unknown_hashes), len(removed_short_ids), elapsed)
    transaction_service.log_block_transaction_cleanup_stats(
        block_hash, block_msg.txn_count(),
        contents_len_before, contents_len_after,
        short_id_count_before, short_id_count_after)

    self._block_hash_marked_for_cleanup.discard(block_hash)
    self.node.post_block_cleanup_tasks(
        block_hash=block_hash,
        short_ids=removed_short_ids,
        unknown_tx_hashes=unknown_hashes)
def block_to_bx_block(self, block_msg: BlockOntMessage, tx_service: TransactionService) -> \
        Tuple[memoryview, BlockInfo]:
    """
    Compresses an Ontology block's transactions and packs it into a bloXroute block.
    """
    start_datetime = datetime.utcnow()
    start_timestamp = time.time()

    segments = deque()
    total_size = 0
    short_ids = []

    header = block_msg.txn_header()
    segments.append(header)
    total_size += len(header)

    # Replace each transaction the service already knows with a one-byte
    # short-id indicator; keep unknown transactions verbatim.
    for raw_tx in block_msg.txns():
        tx_hash, _ = ont_messages_util.get_txid(raw_tx)
        sid = tx_service.get_short_id(tx_hash)
        if sid == constants.NULL_TX_SID:
            segments.append(raw_tx)
            total_size += len(raw_tx)
        else:
            short_ids.append(sid)
            segments.append(ont_constants.ONT_SHORT_ID_INDICATOR_AS_BYTEARRAY)
            total_size += 1

    serialized_short_ids = compact_block_short_ids_serializer.serialize_short_ids_into_bytes(short_ids)
    segments.append(serialized_short_ids)
    # NOTE: deliberately count only the 8-byte offset field here; the
    # serialized short ids' length is added AFTER the offset is packed below,
    # apparently so the packed offset lands on the start of the short-ids
    # section. Do not reorder these size updates.
    total_size += constants.UL_ULL_SIZE_IN_BYTES

    segments.appendleft(block_msg.merkle_root())
    total_size += ont_constants.ONT_HASH_LEN

    # Consensus flag: this path always emits a regular (non-consensus) block.
    segments.appendleft(struct.pack("?", False))
    total_size += 1

    segments.appendleft(struct.pack("<Q", total_size))
    total_size += len(serialized_short_ids)

    # Flatten the deque of segments into one contiguous buffer.
    block = bytearray(total_size)
    write_pos = 0
    for segment in segments:
        block[write_pos:write_pos + len(segment)] = segment
        write_pos += len(segment)

    prev_block_hash = convert.bytes_to_hex(block_msg.prev_block_hash().binary)
    bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))
    original_size = len(block_msg.rawbytes())

    block_info = BlockInfo(
        block_msg.block_hash(), short_ids, start_datetime, datetime.utcnow(),
        (time.time() - start_timestamp) * 1000, block_msg.txn_count(),
        bx_block_hash, prev_block_hash, original_size, total_size,
        100 - float(total_size) / original_size * 100)
    return memoryview(block), block_info
def ont_transactions(self, block: BlockOntMessage = None) -> List[memoryview]:
    """
    Wrap each raw transaction of *block* in a TxOntMessage.

    :param block: block whose transactions to extract. Despite the ``None``
        default (kept for signature compatibility), a real block is required.
    :return: one TxOntMessage per transaction in the block
    :raises ValueError: if ``block`` is None. Previously this crashed with an
        opaque ``AttributeError`` on ``None.txns()``.
    """
    if block is None:
        raise ValueError("ont_transactions requires a block; got None")
    # tx[1:] drops the first byte of each raw transaction payload —
    # presumably a version/type prefix; TODO confirm against the wire format.
    return [TxOntMessage(self.magic, self.version, tx[1:]) for tx in block.txns()]