def __init__(self, node: "OntGatewayNode", network_num: int): super(OntExtensionBlockCleanupService, self).__init__(node, network_num) self.block_cleanup_tasks = TaskQueueProxy( self._create_block_cleanup_task) self.block_confirmation_cleanup_tasks = TaskQueueProxy( create_block_confirmation_cleanup_task)
class OntExtensionMessageConverter(AbstractOntMessageConverter):
    DEFAULT_BLOCK_SIZE = ont_constants.ONT_DEFAULT_BLOCK_SIZE
    MINIMAL_SUB_TASK_TX_COUNT = ont_constants.ONT_MINIMAL_SUB_TASK_TX_COUNT

    def __init__(self, ont_magic: int):
        super(OntExtensionMessageConverter, self).__init__(ont_magic)
        self._default_block_size = self.DEFAULT_BLOCK_SIZE
        self.compression_tasks = TaskQueueProxy(self._create_compression_task)
        self.decompression_tasks = TaskQueueProxy(self._create_decompression_task)

    def block_to_bx_block(
        self, block_msg, tx_service, enable_block_compression: bool, min_tx_age_seconds: float
    ) -> Tuple[memoryview, BlockInfo]:
        compress_start_datetime = datetime.utcnow()
        compress_start_timestamp = time.time()
        self._default_block_size = max(self._default_block_size, len(block_msg.buf))
        tsk = self.compression_tasks.borrow_task()
        tsk.init(tpe.InputBytes(block_msg.buf), tx_service.proxy, enable_block_compression, min_tx_age_seconds)
        try:
            task_pool_proxy.run_task(tsk)
        except tpe.AggregatedException as e:
            self.compression_tasks.return_task(tsk)
            raise message_conversion_error.btc_block_compression_error(block_msg.block_hash(), e)
        bx_block = tsk.bx_block()
        block = memoryview(bx_block)
        compressed_size = len(block)
        original_size = len(block_msg.rawbytes())
        block_hash = OntObjectHash(
            binary=convert.hex_to_bytes(tsk.block_hash().hex_string())
        )
        block_info = BlockInfo(
            block_hash,
            tsk.short_ids(),
            compress_start_datetime,
            datetime.utcnow(),
            (time.time() - compress_start_timestamp) * 1000,
            tsk.txn_count(),
            tsk.compressed_block_hash().hex_string(),
            tsk.prev_block_hash().hex_string(),
            original_size,
            compressed_size,
            100 - float(compressed_size) / original_size * 100,
            tsk.ignored_short_ids()
        )
        self.compression_tasks.return_task(tsk)
        return block, block_info

    def bx_block_to_block(
        self, bx_block_msg: memoryview, tx_service: ExtensionTransactionService
    ) -> BlockDecompressionResult:
        decompress_start_datetime = datetime.utcnow()
        decompress_start_timestamp = time.time()
        tsk = self.decompression_tasks.borrow_task()
        tsk.init(tpe.InputBytes(bx_block_msg), tx_service.proxy)
        try:
            task_pool_proxy.run_task(tsk)
        except tpe.AggregatedException as e:
            self.decompression_tasks.return_task(tsk)
            header_info = ont_normal_message_converter.parse_bx_block_header(bx_block_msg, deque())
            raise message_conversion_error.btc_block_decompression_error(header_info.block_hash, e)
        total_tx_count = tsk.tx_count()
        unknown_tx_hashes = [
            Sha256Hash(bytearray(unknown_tx_hash.binary()))
            for unknown_tx_hash in tsk.unknown_tx_hashes()
        ]
        unknown_tx_sids = tsk.unknown_tx_sids()
        block_hash = OntObjectHash(
            binary=convert.hex_to_bytes(tsk.block_hash().hex_string())
        )
        if tsk.success():
            ont_block_msg = BlockOntMessage(buf=memoryview(tsk.block_message()))
            logger.debug(
                "Successfully parsed block broadcast message. {} transactions "
                "in block {}", total_tx_count, block_hash
            )
        else:
            ont_block_msg = None
            logger.debug(
                "Block recovery needed for {}. Missing {} sids, {} tx hashes. "
                "Total txs in block: {}",
                block_hash, len(unknown_tx_sids), len(unknown_tx_hashes), total_tx_count
            )
        block_info = get_block_info(
            bx_block_msg,
            block_hash,
            tsk.short_ids(),
            decompress_start_datetime,
            decompress_start_timestamp,
            total_tx_count,
            ont_block_msg
        )
        self.decompression_tasks.return_task(tsk)
        return BlockDecompressionResult(ont_block_msg, block_info, unknown_tx_sids, unknown_tx_hashes)

    def special_memory_size(self, ids: Optional[Set[int]] = None) -> SpecialTuple:
        return memory_utils.add_special_objects(self.compression_tasks, self.decompression_tasks, ids=ids)

    def _create_compression_task(self) -> tpe.OntBlockCompressionTask:
        return tpe.OntBlockCompressionTask(self._default_block_size, self.MINIMAL_SUB_TASK_TX_COUNT)

    def _create_decompression_task(self) -> tpe.OntBlockDecompressionTask:
        return tpe.OntBlockDecompressionTask(self._default_block_size, self.MINIMAL_SUB_TASK_TX_COUNT)
def __init__(self, node: "EthGatewayNode", network_num: int): super().__init__(node, network_num) self.cleanup_tasks = TaskQueueProxy(create_cleanup_task)
class OntExtensionBlockCleanupService(AbstractOntBlockCleanupService):
    MINIMAL_SUB_TASK_TX_COUNT = ont_constants.ONT_MINIMAL_SUB_TASK_TX_COUNT

    def __init__(self, node: "OntGatewayNode", network_num: int):
        super(OntExtensionBlockCleanupService, self).__init__(node, network_num)
        self.block_cleanup_tasks = TaskQueueProxy(self._create_block_cleanup_task)
        self.block_confirmation_cleanup_tasks = TaskQueueProxy(create_block_confirmation_cleanup_task)

    def clean_block_transactions(
        self, block_msg: BlockOntMessage, transaction_service: TransactionService
    ) -> None:
        start_datetime = datetime.utcnow()
        start_time = time.time()
        tx_hash_to_contents_len_before_cleanup = transaction_service.get_tx_hash_to_contents_len()
        cleanup_task = self.block_cleanup_tasks.borrow_task()
        tx_service = typing.cast(ExtensionTransactionService, transaction_service)
        cleanup_task.init(tpe.InputBytes(block_msg.buf), tx_service.proxy)
        init_time = time.time()
        task_pool_proxy.run_task(cleanup_task)
        task_run_time = time.time()
        unknown_tx_hashes_count = len(cleanup_task.unknown_tx_hashes())
        tx_property_fetch_time = time.time()
        short_ids = cleanup_task.short_ids()
        short_ids_fetch_time = time.time()
        short_ids_count = len(short_ids)
        tx_service.update_removed_transactions(cleanup_task.total_content_removed(), short_ids)
        remove_from_tx_service_time = time.time()
        # TODO: clean the short ids/transactions from the alarm queue after
        #  refactoring the transaction service
        block_hash = block_msg.block_hash()
        tx_service.on_block_cleaned_up(block_hash)
        tx_hash_to_contents_len_after_cleanup = transaction_service.get_tx_hash_to_contents_len()
        end_datetime = datetime.utcnow()
        end_time = time.time()
        logger.statistics({
            "type": "BlockTransactionsCleanup",
            "block_hash": repr(block_hash),
            "unknown_tx_hashes_count": unknown_tx_hashes_count,
            "short_ids_count": short_ids_count,
            "block_transactions_count": cleanup_task.txn_count(),
            "start_datetime": start_datetime,
            "end_datetime": end_datetime,
            "task_init_time": init_time - start_time,
            "task_run_time": task_run_time - init_time,
            "tx_property_fetch_time": tx_property_fetch_time - task_run_time,
            "short_ids_fetch_time": short_ids_fetch_time - tx_property_fetch_time,
            "remove_from_tx_service_time": remove_from_tx_service_time - short_ids_fetch_time,
            "duration": end_time - start_time,
            "tx_hash_to_contents_len_before_cleanup": tx_hash_to_contents_len_before_cleanup,
            "tx_hash_to_contents_len_after_cleanup": tx_hash_to_contents_len_after_cleanup,
        })
        self.block_cleanup_tasks.return_task(cleanup_task)
        self._block_hash_marked_for_cleanup.discard(block_hash)
        self.node.post_block_cleanup_tasks(
            block_hash=block_hash,
            short_ids=short_ids,
            unknown_tx_hashes=(
                Sha256Hash(convert.hex_to_bytes(tx_hash.hex_string()))
                for tx_hash in cleanup_task.unknown_tx_hashes()
            )
        )

    def special_memory_size(self, ids: Optional[Set[int]] = None) -> SpecialTuple:
        return memory_utils.add_special_objects(
            self.block_cleanup_tasks, self.block_confirmation_cleanup_tasks, ids
        )

    # pyre-fixme[14]: `contents_cleanup` overrides method defined in
    #  `AbstractBlockCleanupService` inconsistently.
    def contents_cleanup(
        self,
        transaction_service: TransactionService,
        block_confirmation_message: BlockConfirmationMessage
    ):
        extension_cleanup_service_helpers.contents_cleanup(
            transaction_service, block_confirmation_message, self.block_confirmation_cleanup_tasks
        )

    def _create_block_cleanup_task(self) -> tpe.OntBlockCleanupTask:
        return tpe.OntBlockCleanupTask(self.MINIMAL_SUB_TASK_TX_COUNT)
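# clean_block_transactions above derives each statistics field from successive
# time.time() snapshots (init, run, property fetch, short-id fetch, removal).
# A self-contained sketch of that measurement pattern, with illustrative names
# that are not part of the service:
import time


def measure_phases(phases):
    # phases: ordered list of (name, zero-arg callable). Returns per-phase
    # wall-clock durations, mirroring how the "BlockTransactionsCleanup"
    # statistics entry subtracts consecutive timestamps.
    durations = {}
    previous = time.time()
    for name, run_phase in phases:
        run_phase()
        now = time.time()
        durations[name] = now - previous
        previous = now
    return durations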
class BtcExtensionMessageConverter(AbstractBtcMessageConverter):
    DEFAULT_BLOCK_SIZE = btc_constants.BTC_DEFAULT_BLOCK_SIZE
    MINIMAL_SUB_TASK_TX_COUNT = btc_constants.BTC_MINIMAL_SUB_TASK_TX_COUNT

    def __init__(self, btc_magic):
        super(BtcExtensionMessageConverter, self).__init__(btc_magic)
        self._default_block_size = self.DEFAULT_BLOCK_SIZE
        self.compression_tasks = TaskQueueProxy(self._create_compression_task)
        self.compact_mapping_tasks = TaskQueueProxy(self._create_compact_mapping_task)
        self.decompression_tasks = TaskQueueProxy(self._create_decompression_task)
        self._extension_recovered_items: Dict[int, ExtensionCompactBlockRecoveryData] = {}

    def block_to_bx_block(self, block_msg, tx_service) -> Tuple[memoryview, BlockInfo]:
        compress_start_datetime = datetime.utcnow()
        compress_start_timestamp = time.time()
        self._default_block_size = max(self._default_block_size, len(block_msg.buf))
        tsk = self.compression_tasks.borrow_task()
        tsk.init(tpe.InputBytes(block_msg.buf), tx_service.proxy)
        try:
            task_pool_proxy.run_task(tsk)
        except tpe.AggregatedException as e:
            self.compression_tasks.return_task(tsk)
            raise message_conversion_error.btc_block_compression_error(block_msg.block_hash(), e)
        bx_block = tsk.bx_block()
        block = memoryview(bx_block)
        compressed_size = len(block)
        original_size = len(block_msg.rawbytes())
        block_hash = BtcObjectHash(
            binary=convert.hex_to_bytes(tsk.block_hash().hex_string())
        )
        block_info = BlockInfo(
            block_hash,
            tsk.short_ids(),
            compress_start_datetime,
            datetime.utcnow(),
            (time.time() - compress_start_timestamp) * 1000,
            tsk.txn_count(),
            tsk.compressed_block_hash().hex_string(),
            tsk.prev_block_hash().hex_string(),
            original_size,
            compressed_size,
            100 - float(compressed_size) / original_size * 100
        )
        self.compression_tasks.return_task(tsk)
        return block, block_info

    def bx_block_to_block(self, bx_block_msg, tx_service) -> BlockDecompressionResult:
        decompress_start_datetime = datetime.utcnow()
        decompress_start_timestamp = time.time()
        tsk = self.decompression_tasks.borrow_task()
        tsk.init(tpe.InputBytes(bx_block_msg), tx_service.proxy)
        try:
            task_pool_proxy.run_task(tsk)
        except tpe.AggregatedException as e:
            self.decompression_tasks.return_task(tsk)
            header_info = btc_normal_message_converter.parse_bx_block_header(bx_block_msg, deque())
            raise message_conversion_error.btc_block_decompression_error(header_info.block_hash, e)
        total_tx_count = tsk.tx_count()
        unknown_tx_hashes = [
            Sha256Hash(bytearray(unknown_tx_hash.binary()))
            for unknown_tx_hash in tsk.unknown_tx_hashes()
        ]
        unknown_tx_sids = tsk.unknown_tx_sids()
        block_hash = BtcObjectHash(
            binary=convert.hex_to_bytes(tsk.block_hash().hex_string())
        )
        if tsk.success():
            btc_block_msg = BlockBtcMessage(buf=memoryview(tsk.block_message()))
            logger.debug(
                "Successfully parsed block broadcast message. {} transactions "
                "in block {}", total_tx_count, block_hash
            )
        else:
            btc_block_msg = None
            logger.debug(
                "Block recovery needed for {}. Missing {} sids, {} tx hashes. "
                "Total txs in block: {}",
                block_hash, len(unknown_tx_sids), len(unknown_tx_hashes), total_tx_count
            )
        block_info = get_block_info(
            bx_block_msg,
            block_hash,
            tsk.short_ids(),
            decompress_start_datetime,
            decompress_start_timestamp,
            total_tx_count,
            btc_block_msg
        )
        self.decompression_tasks.return_task(tsk)
        return BlockDecompressionResult(btc_block_msg, block_info, unknown_tx_sids, unknown_tx_hashes)

    # pyre-fixme[14]: `compact_block_to_bx_block` overrides method defined in
    #  `AbstractBtcMessageConverter` inconsistently.
    def compact_block_to_bx_block(
        self,
        compact_block: CompactBlockBtcMessage,
        transaction_service: ExtensionTransactionService
    ) -> CompactBlockCompressionResult:
        compress_start_datetime = datetime.utcnow()
        tsk = self.compact_mapping_tasks.borrow_task()
        tsk.init(tpe.InputBytes(compact_block.buf), transaction_service.proxy, compact_block.magic())
        try:
            task_pool_proxy.run_task(tsk)
        except tpe.AggregatedException as e:
            self.compact_mapping_tasks.return_task(tsk)
            raise message_conversion_error.btc_compact_block_compression_error(compact_block.block_hash(), e)
        success = tsk.success()
        recovered_item = ExtensionCompactBlockRecoveryData(transaction_service, tsk)
        block_info = BlockInfo(
            compact_block.block_hash(),
            [],
            compress_start_datetime,
            compress_start_datetime,
            0,
            None,
            None,
            None,
            len(compact_block.rawbytes()),
            None,
            None
        )
        if success:
            result = CompactBlockCompressionResult(
                False, block_info, None, None, [], create_recovered_transactions()
            )
            return self._extension_recovered_compact_block_to_bx_block(result, recovered_item)
        else:
            recovery_index = self._last_recovery_idx
            self._extension_recovered_items[recovery_index] = recovered_item
            self._last_recovery_idx += 1
            return CompactBlockCompressionResult(
                False,
                block_info,
                None,
                recovery_index,
                tsk.missing_indices(),
                create_recovered_transactions()
            )

    def recovered_compact_block_to_bx_block(  # pyre-ignore
        self,
        failed_mapping_result: CompactBlockCompressionResult,
    ) -> CompactBlockCompressionResult:
        failed_block_info = failed_mapping_result.block_info
        block_info = BlockInfo(
            failed_block_info.block_hash,  # pyre-ignore
            failed_block_info.short_ids,  # pyre-ignore
            datetime.utcnow(),
            datetime.utcnow(),
            0,
            None,
            None,
            None,
            failed_block_info.original_size,  # pyre-ignore
            None,
            None
        )
        failed_mapping_result.block_info = block_info
        recovered_item = self._extension_recovered_items.pop(failed_mapping_result.recovery_index)  # pyre-ignore
        return self._extension_recovered_compact_block_to_bx_block(
            failed_mapping_result, recovered_item
        )

    def special_memory_size(self, ids: Optional[Set[int]] = None) -> SpecialTuple:
        return memory_utils.add_special_objects(
            self.compression_tasks, self.decompression_tasks, self.compact_mapping_tasks, ids=ids
        )

    def _extension_recovered_compact_block_to_bx_block(
        self,
        mapping_result: CompactBlockCompressionResult,
        recovery_item: ExtensionCompactBlockRecoveryData
    ) -> CompactBlockCompressionResult:
        mapping_task = recovery_item.mapping_task
        compression_task: tpe.BtcCompactBlockCompressionTask = mapping_task.compression_task()
        # pyre-fixme[16]: `List` has no attribute `vector`.
        compression_task.add_recovered_transactions(mapping_result.recovered_transactions.vector)
        mapping_block_info = mapping_result.block_info
        try:
            task_pool_proxy.run_task(compression_task)
        except tpe.AggregatedException as e:
            self.compact_mapping_tasks.return_task(mapping_task)
            # pyre-fixme[16]: `Optional` has no attribute `block_hash`.
            raise message_conversion_error.btc_compact_block_compression_error(mapping_block_info.block_hash, e)
        bx_block = memoryview(compression_task.bx_block())
        block_hash = mapping_block_info.block_hash
        txn_count = compression_task.txn_count()
        compressed_block_hash = compression_task.compressed_block_hash().hex_string()
        prev_block_hash = compression_task.prev_block_hash().hex_string()
        short_ids = compression_task.short_ids()
        compress_end_datetime = datetime.utcnow()
        # pyre-fixme[16]: `Optional` has no attribute `start_datetime`.
        compress_start_datetime = mapping_block_info.start_datetime
        # pyre-fixme[16]: `Optional` has no attribute `original_size`.
        original_size = mapping_block_info.original_size
        compressed_size = len(bx_block)
        block_info = BlockInfo(
            block_hash,
            short_ids,
            compress_start_datetime,
            compress_end_datetime,
            (compress_end_datetime - compress_start_datetime).total_seconds() * 1000,
            txn_count,
            compressed_block_hash,
            prev_block_hash,
            original_size,
            compressed_size,
            100 - float(compressed_size) / original_size * 100
        )
        self.compact_mapping_tasks.return_task(mapping_task)
        return CompactBlockCompressionResult(True, block_info, bx_block, None, [], [])

    def _create_compression_task(self) -> tpe.BtcBlockCompressionTask:
        return tpe.BtcBlockCompressionTask(self._default_block_size, self.MINIMAL_SUB_TASK_TX_COUNT)

    def _create_decompression_task(self) -> tpe.BtcBlockDecompressionTask:
        return tpe.BtcBlockDecompressionTask(
            self._default_block_size, self.MINIMAL_SUB_TASK_TX_COUNT
        )

    def _create_compact_mapping_task(self) -> tpe.BtcCompactBlockMappingTask:
        return tpe.BtcCompactBlockMappingTask(self._default_block_size)
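# The compact-block path above is two-phase: compact_block_to_bx_block either
# finishes compression immediately or returns a recovery_index plus the
# missing_indices the mapping task could not resolve, and
# recovered_compact_block_to_bx_block completes the conversion once the caller
# supplies the missing transactions. A hedged sketch of a driver for that
# handshake; fetch_missing_transactions is a hypothetical callback, and it is
# assumed (not shown above) that the recovered-transactions container accepts
# list-like appends:
def convert_compact_block(converter, compact_block, tx_service, fetch_missing_transactions):
    result = converter.compact_block_to_bx_block(compact_block, tx_service)
    if result.success:
        return result
    # Recovery path: resolve the transactions the mapping task was missing,
    # attach them to the result, then retry against the stashed recovery item.
    for raw_tx in fetch_missing_transactions(result.missing_indices):
        result.recovered_transactions.append(raw_tx)
    return converter.recovered_compact_block_to_bx_block(result)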
class EthExtensionMessageConverter(EthAbstractMessageConverter):
    DEFAULT_BLOCK_SIZE = eth_common_constants.ETH_DEFAULT_BLOCK_SIZE
    MINIMAL_SUB_TASK_TX_COUNT = eth_common_constants.ETH_MINIMAL_SUB_TASK_TX_COUNT

    def __init__(self):
        super().__init__()
        self._default_block_size = self.DEFAULT_BLOCK_SIZE
        self.compression_tasks = TaskQueueProxy(self._create_compression_task)
        self.decompression_tasks = TaskQueueProxy(self._create_decompression_task)

    def block_to_bx_block(
        self, block_msg: InternalEthBlockInfo, tx_service: ExtensionTransactionService
    ) -> Tuple[memoryview, BlockInfo]:
        compress_start_datetime = datetime.datetime.utcnow()
        compress_start_timestamp = time.time()
        self._default_block_size = max(self._default_block_size, len(block_msg.rawbytes()))
        tsk = self.compression_tasks.borrow_task()
        tsk.init(tpe.InputBytes(block_msg.rawbytes()), tx_service.proxy)
        try:
            task_pool_proxy.run_task(tsk)
        except tpe.AggregatedException as e:
            self.compression_tasks.return_task(tsk)
            raise message_conversion_error.eth_block_compression_error(block_msg.block_hash(), e)
        bx_block = tsk.bx_block()
        starting_offset = tsk.starting_offset()
        block = memoryview(bx_block)[starting_offset:]
        compressed_size = len(block)
        original_size = len(block_msg.rawbytes()) - starting_offset
        block_hash = block_msg.block_hash()
        block_info = BlockInfo(
            block_hash,
            tsk.short_ids(),
            compress_start_datetime,
            datetime.datetime.utcnow(),
            (time.time() - compress_start_timestamp) * 1000,
            tsk.txn_count(),
            tsk.compressed_block_hash().hex_string(),
            tsk.prev_block_hash().hex_string(),
            original_size,
            compressed_size,
            100 - float(compressed_size) / original_size * 100
        )
        self.compression_tasks.return_task(tsk)
        return block, block_info

    def bx_block_to_block(self, bx_block_msg, tx_service) -> BlockDecompressionResult:
        decompress_start_datetime = datetime.datetime.utcnow()
        decompress_start_timestamp = time.time()
        tsk = self.decompression_tasks.borrow_task()
        tsk.init(tpe.InputBytes(bx_block_msg), tx_service.proxy)
        try:
            task_pool_proxy.run_task(tsk)
        except tpe.AggregatedException as e:
            block_hash = Sha256Hash(convert.hex_to_bytes(tsk.block_hash().hex_string()))
            self.decompression_tasks.return_task(tsk)
            # TODO: find a better solution
            raise message_conversion_error.eth_block_decompression_error(block_hash, e)
        total_tx_count = tsk.tx_count()
        unknown_tx_hashes = [
            Sha256Hash(bytearray(unknown_tx_hash.binary()))
            for unknown_tx_hash in tsk.unknown_tx_hashes()
        ]
        unknown_tx_sids = tsk.unknown_tx_sids()
        block_hash = Sha256Hash(convert.hex_to_bytes(tsk.block_hash().hex_string()))
        if tsk.success():
            starting_offset = tsk.starting_offset()
            block = memoryview(tsk.block_message())[starting_offset:]
            block_msg = InternalEthBlockInfo(block)
            content_size = len(block_msg.rawbytes())
            logger.debug(
                "Successfully parsed block broadcast message. {} "
                "transactions in block {}", total_tx_count, block_hash
            )
            bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(bx_block_msg))
            compressed_size = len(bx_block_msg)
            block_info = BlockInfo(
                block_hash,
                tsk.short_ids(),
                decompress_start_datetime,
                datetime.datetime.utcnow(),
                (time.time() - decompress_start_timestamp) * 1000,
                total_tx_count,
                bx_block_hash,
                convert.bytes_to_hex(block_msg.prev_block_hash().binary),
                len(block_msg.rawbytes()),
                compressed_size,
                100 - float(compressed_size) / content_size * 100
            )
        else:
            block_msg = None
            logger.debug(
                "Block recovery needed for {}. Missing {} sids, {} tx hashes. "
                "Total txs in block: {}",
                block_hash, len(unknown_tx_sids), len(unknown_tx_hashes), total_tx_count
            )
            block_info = BlockInfo(
                block_hash,
                tsk.short_ids(),
                decompress_start_datetime,
                datetime.datetime.utcnow(),
                (time.time() - decompress_start_timestamp) * 1000,
                None,
                None,
                None,
                None,
                None,
                None
            )
        self.decompression_tasks.return_task(tsk)
        return BlockDecompressionResult(block_msg, block_info, unknown_tx_sids, unknown_tx_hashes)

    def _create_compression_task(self) -> tpe.EthBlockCompressionTask:
        return tpe.EthBlockCompressionTask(self._default_block_size, self.MINIMAL_SUB_TASK_TX_COUNT)

    def _create_decompression_task(self) -> tpe.EthBlockDecompressionTask:
        return tpe.EthBlockDecompressionTask(self._default_block_size, self.MINIMAL_SUB_TASK_TX_COUNT)