def contents_cleanup(transaction_service: TransactionService,
                     block_confirmation_message: AbstractCleanupMessage,
                     cleanup_tasks
                     ):
    start_datetime = datetime.utcnow()
    start_time = time.time()
    tx_service = typing.cast(ExtensionTransactionService, transaction_service)
    cleanup_task = cleanup_tasks.borrow_task()
    cleanup_task.init(tpe.InputBytes(block_confirmation_message.buf), tx_service.proxy)
    task_pool_proxy.run_task(cleanup_task)
    short_ids = cleanup_task.short_ids()
    total_content_removed = cleanup_task.total_content_removed()
    tx_count = cleanup_task.tx_count()
    message_hash = block_confirmation_message.message_hash()
    tx_service.update_removed_transactions(total_content_removed, short_ids)
    transaction_service.on_block_cleaned_up(message_hash)
    end_datetime = datetime.utcnow()
    end_time = time.time()
    duration = end_time - start_time
    logger.statistics(
        {
            "type": "MemoryCleanup",
            "event": "CacheStateAfterBlockCleanup",
            "data": transaction_service.get_cache_state_json(),
            "start_datetime": start_datetime,
            "end_datetime": end_datetime,
            "duration": duration,
            "total_content_removed": total_content_removed,
            "tx_count": tx_count,
            "short_ids_count": len(short_ids),
            "message_hash": repr(message_hash),
        }
    )
    cleanup_tasks.return_task(cleanup_task)
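# A hedged sketch, not part of the original source: the borrow/init/run/return
# sequence above assumes run_task never raises. A defensive variant
# (hypothetical name contents_cleanup_guarded) returns the pooled task even on
# failure:
def contents_cleanup_guarded(transaction_service: TransactionService,
                             block_confirmation_message: AbstractCleanupMessage,
                             cleanup_tasks
                             ):
    tx_service = typing.cast(ExtensionTransactionService, transaction_service)
    cleanup_task = cleanup_tasks.borrow_task()
    try:
        cleanup_task.init(tpe.InputBytes(block_confirmation_message.buf), tx_service.proxy)
        task_pool_proxy.run_task(cleanup_task)
    finally:
        # always return the task to the pool, even if init/run raised
        cleanup_tasks.return_task(cleanup_task)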
def create_txs_service_msg(
        transaction_service: TransactionService,
        tx_service_snap: List[Sha256Hash],
        sync_tx_content: bool = True) -> List[TxContentShortIds]:
    task_start = time.time()
    txs_content_short_ids: List[TxContentShortIds] = []
    txs_msg_len = 0
    while tx_service_snap:
        transaction_key = transaction_service.get_transaction_key(
            tx_service_snap.pop())
        short_ids = list(
            transaction_service.get_short_ids_by_key(transaction_key))
        if sync_tx_content:
            tx_content = transaction_service.get_transaction_by_key(
                transaction_key)
        else:
            tx_content = bytearray(0)
        # TODO: evaluate short id quota type flag value
        short_id_flags = [
            transaction_service.get_short_id_transaction_type(short_id)
            for short_id in short_ids
        ]
        tx_content_short_ids: TxContentShortIds = TxContentShortIds(
            transaction_key.transaction_hash, tx_content, short_ids,
            short_id_flags)

        txs_msg_len += txs_serializer.get_serialized_tx_content_short_ids_bytes_len(
            tx_content_short_ids)

        txs_content_short_ids.append(tx_content_short_ids)
        if (txs_msg_len >= constants.TXS_MSG_SIZE
                or time.time() - task_start > constants.TXS_SYNC_TASK_DURATION):
            break
    return txs_content_short_ids
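# A hedged usage sketch (sync_all_batches is a hypothetical helper): since
# create_txs_service_msg pops hashes from the snapshot list in place, draining
# the list yields successive sync batches, each capped by constants.TXS_MSG_SIZE
# bytes or constants.TXS_SYNC_TASK_DURATION seconds.
def sync_all_batches(transaction_service: TransactionService) -> List[List[TxContentShortIds]]:
    snapshot = transaction_service.get_snapshot()
    batches = []
    while snapshot:
        batches.append(create_txs_service_msg(transaction_service, snapshot))
    return batches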
def create_txs_service_msg_from_time(
    transaction_service: TransactionService,
    start_time: float = 0,
    sync_tx_content: bool = True,
    snapshot_cache_keys: Optional[Set[TransactionCacheKeyType]] = None
) -> Tuple[List[TxContentShortIds], float, bool, Set[TransactionCacheKeyType]]:
    task_start = time.time()
    txs_content_short_ids: List[TxContentShortIds] = []
    txs_msg_len = 0
    if snapshot_cache_keys is None:
        snapshot_cache_keys = set()
    done = False
    timestamp = start_time
    expire_short_ids = []
    for short_id, timestamp in \
            transaction_service._tx_assignment_expire_queue.queue.items():
        if timestamp > start_time:
            cache_key = transaction_service._short_id_to_tx_cache_key.get(
                short_id, None)
            if cache_key is not None:
                transaction_key = transaction_service.get_transaction_key(
                    None, cache_key)
                if cache_key not in snapshot_cache_keys:
                    snapshot_cache_keys.add(
                        transaction_key.transaction_cache_key)
                    short_ids = list(
                        transaction_service._tx_cache_key_to_short_ids[
                            transaction_key.transaction_cache_key])
                    if sync_tx_content and transaction_service.has_transaction_contents_by_key(
                            transaction_key):
                        tx_content = transaction_service._tx_cache_key_to_contents[
                            transaction_key.transaction_cache_key]
                    else:
                        tx_content = bytearray(0)
                    short_id_flags = [
                        transaction_service.get_short_id_transaction_type(
                            short_id) for short_id in short_ids
                    ]
                    tx_content_short_ids: TxContentShortIds = TxContentShortIds(
                        transaction_key.transaction_hash, tx_content,
                        short_ids, short_id_flags)
                    txs_msg_len += txs_serializer.get_serialized_tx_content_short_ids_bytes_len(
                        tx_content_short_ids)
                    txs_content_short_ids.append(tx_content_short_ids)
                    if txs_msg_len >= constants.TXS_MSG_SIZE or (
                            time.time() - task_start >
                            constants.TXS_SYNC_TASK_DURATION):
                        break
            else:
                expire_short_ids.append(short_id)
    else:
        done = True
    for short_id in expire_short_ids:
        transaction_service._tx_assignment_expire_queue.remove(short_id)
    return txs_content_short_ids, timestamp, done, snapshot_cache_keys
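# A hedged usage sketch (sync_from_time is a hypothetical helper), mirroring the
# timed-sync test later in this section: page through short-id assignments by
# timestamp until the helper reports done, feeding the returned timestamp and
# cache-key set back into each subsequent call.
def sync_from_time(transaction_service: TransactionService) -> List[List[TxContentShortIds]]:
    messages = []
    done = False
    timestamp = 0.0
    snapshot_cache_keys = None
    while not done:
        txs_content_short_ids, timestamp, done, snapshot_cache_keys = \
            create_txs_service_msg_from_time(
                transaction_service, timestamp, False, snapshot_cache_keys)
        messages.append(txs_content_short_ids)
    return messages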
    def clean_block_transactions(
            self, block_msg: BlockBtcMessage, transaction_service: TransactionService
    ) -> None:
        start_datetime = datetime.utcnow()
        start_time = time.time()
        tx_hash_to_contents_len_before_cleanup = transaction_service.get_tx_hash_to_contents_len()
        cleanup_task = self.block_cleanup_tasks.borrow_task()
        tx_service = typing.cast(ExtensionTransactionService, transaction_service)
        cleanup_task.init(tpe.InputBytes(block_msg.buf), tx_service.proxy)
        init_time = time.time()
        task_pool_proxy.run_task(cleanup_task)
        task_run_time = time.time()
        unknown_tx_hashes_count = len(cleanup_task.unknown_tx_hashes())
        tx_property_fetch_time = time.time()
        short_ids = cleanup_task.short_ids()
        short_ids_fetch_time = time.time()
        short_ids_count = len(short_ids)
        tx_service.update_removed_transactions(cleanup_task.total_content_removed(), short_ids)
        remove_from_tx_service_time = time.time()
        # TODO : clean the short ids/transactions from the alarm queue after refactoring the transaction service
        block_hash = block_msg.block_hash()
        tx_service.on_block_cleaned_up(block_hash)
        tx_hash_to_contents_len_after_cleanup = transaction_service.get_tx_hash_to_contents_len()
        end_datetime = datetime.utcnow()
        end_time = time.time()

        logger.statistics(
            {
                "type": "BlockTransactionsCleanup",
                "block_hash": repr(block_hash),
                "unknown_tx_hashes_count": unknown_tx_hashes_count,
                "short_ids_count": short_ids_count,
                "block_transactions_count": cleanup_task.txn_count(),
                "start_datetime": start_datetime,
                "end_datetime": end_datetime,
                "task_init_time": init_time - start_time,
                "task_run_time": task_run_time - init_time,
                "tx_property_fetch_time": tx_property_fetch_time - task_run_time,
                "short_ids_fetch_time": short_ids_fetch_time - tx_property_fetch_time,
                "remove_from_tx_service_time": remove_from_tx_service_time - short_ids_fetch_time,
                "duration": end_time - start_time,
                "tx_hash_to_contents_len_before_cleanup": tx_hash_to_contents_len_before_cleanup,
                "tx_hash_to_contents_len_after_cleanup": tx_hash_to_contents_len_after_cleanup,
            }
        )
        self.block_cleanup_tasks.return_task(cleanup_task)
        self._block_hash_marked_for_cleanup.discard(block_hash)
        self.node.post_block_cleanup_tasks(
            block_hash=block_hash,
            short_ids=short_ids,
            unknown_tx_hashes=(
                Sha256Hash(convert.hex_to_bytes(tx_hash.hex_string()))
                for tx_hash in cleanup_task.unknown_tx_hashes()
            )
        )
    def setUp(self):
        self.node = MockGatewayNode(gateway_helpers.get_gateway_opts(
            8000,
            include_default_btc_args=True,
            compact_block_min_tx_count=5
        ))
        self.node.block_processing_service = MagicMock()

        self.connection = BtcNodeConnection(
            MockSocketConnection(node=self.node, ip_address=LOCALHOST, port=123), self.node
        )
        self.connection.node = self.node
        self.connection.peer_ip = LOCALHOST
        self.connection.peer_port = 8001
        self.connection.network_num = 2
        self.sut = BtcNodeConnectionProtocol(self.connection)

        full_block_msg = BlockBtcMessage(
            buf=bytearray(convert.hex_to_bytes(self.FULL_BLOCK_BYTES_HEX))
        )
        if self.node.opts.use_extensions:
            transaction_service = ExtensionTransactionService(self.node, 0)
        else:
            transaction_service = TransactionService(self.node, 0)

        short_id = 1
        for tx in full_block_msg.txns():
            tx_hash = btc_common_utils.get_txid(tx)
            transaction_service.set_transaction_contents(tx_hash, tx)
            transaction_service.assign_short_id(tx_hash, short_id)
            short_id += 1

        self.sut.connection.node._tx_service = transaction_service
    def clean_block_transactions(
            self, block_msg: BlockBtcMessage,
            transaction_service: TransactionService) -> None:
        block_short_ids = []
        block_unknown_tx_hashes = []
        start_time = time.time()

        short_ids_count = 0
        unknown_tx_hashes_count = 0
        transactions_processed = 0

        tx_hash_to_contents_len_before_cleanup = \
            transaction_service.get_tx_hash_to_contents_len()
        short_id_count_before_cleanup = \
            transaction_service.get_short_id_count()

        for tx in block_msg.txns():
            tx_hash = BtcObjectHash(buf=crypto.double_sha256(tx),
                                    length=BTC_SHA_HASH_LEN)
            short_ids = transaction_service.remove_transaction_by_tx_hash(
                tx_hash, force=True)
            if short_ids is None:
                unknown_tx_hashes_count += 1
                block_unknown_tx_hashes.append(tx_hash)
            else:
                short_ids_count += len(short_ids)
                block_short_ids.extend(short_ids)
            transactions_processed += 1
        block_hash = block_msg.block_hash()
        transaction_service.on_block_cleaned_up(block_hash)
        end_time = time.time()
        duration = end_time - start_time
        tx_hash_to_contents_len_after_cleanup = \
            transaction_service.get_tx_hash_to_contents_len()
        short_id_count_after_cleanup = transaction_service.get_short_id_count()

        logger.debug(
            "Finished cleaning up block {}. Processed {} hashes, {} of which were unknown, and cleaned up {} "
            "short ids. Took {:.3f}s.", block_hash, transactions_processed,
            unknown_tx_hashes_count, short_ids_count, duration)

        transaction_service.log_block_transaction_cleanup_stats(
            block_hash, block_msg.txn_count(),
            tx_hash_to_contents_len_before_cleanup,
            tx_hash_to_contents_len_after_cleanup,
            short_id_count_before_cleanup, short_id_count_after_cleanup)

        self._block_hash_marked_for_cleanup.discard(block_hash)
        self.node.post_block_cleanup_tasks(
            block_hash=block_hash,
            short_ids=block_short_ids,
            unknown_tx_hashes=block_unknown_tx_hashes)
    def clean_block_transactions_by_block_components(
            self, block_hash: Sha256Hash,
            transactions_list: Iterable[Sha256Hash],
            transaction_service: TransactionService) -> None:
        logger.debug("Processing block for cleanup: {}", block_hash)
        tx_hash_to_contents_len_before_cleanup = \
            transaction_service.get_tx_hash_to_contents_len()
        short_id_count_before_cleanup = \
            transaction_service.get_short_id_count()

        block_short_ids = []
        block_unknown_tx_hashes = []

        short_ids_count = 0
        unknown_tx_hashes_count = 0
        transactions_processed = 0

        start_time = time.time()
        for tx_hash in transactions_list:
            short_ids = transaction_service.remove_transaction_by_tx_hash(
                tx_hash, force=True)
            if short_ids is None:
                unknown_tx_hashes_count += 1
                block_unknown_tx_hashes.append(tx_hash)
            else:
                short_ids_count += len(short_ids)
                block_short_ids.extend(short_ids)
            transactions_processed += 1
        transaction_service.on_block_cleaned_up(block_hash)

        end_time = time.time()
        duration = end_time - start_time
        tx_hash_to_contents_len_after_cleanup = \
            transaction_service.get_tx_hash_to_contents_len()
        short_id_count_after_cleanup = transaction_service.get_short_id_count()

        logger.debug(
            "Finished cleaning up block {}. Processed {} hashes, {} of which were unknown, and cleaned up {} "
            "short ids. Took {:.3f}s.", block_hash, transactions_processed,
            unknown_tx_hashes_count, short_ids_count, duration)

        transaction_service.log_block_transaction_cleanup_stats(
            block_hash, transactions_processed,
            tx_hash_to_contents_len_before_cleanup,
            tx_hash_to_contents_len_after_cleanup,
            short_id_count_before_cleanup, short_id_count_after_cleanup)

        self._block_hash_marked_for_cleanup.discard(block_hash)
        self.node.post_block_cleanup_tasks(block_hash, block_short_ids,
                                           block_unknown_tx_hashes)
Example #8
    def __init__(self, opts: Namespace):
        super(MockNode, self).__init__(opts)
        self.alarm_queue = AlarmQueue()
        self.network_num = DEFAULT_NETWORK_NUM

        self.broadcast_messages = []

        self._tx_service = TransactionService(self, self.network_num)
        self._tx_services = {}
Example #9
def parse_bx_block_transactions_and_msg_tail(block_hash: Sha256Hash, bx_block: memoryview, offset: int,
                                             short_ids: List[int], block_offsets: BlockOffsets,
                                             tx_service: TransactionService,
                                             block_pieces: Deque[Union[bytearray, memoryview]]) -> \
        Tuple[List[int], List[Sha256Hash], int]:
    has_missing, unknown_tx_sids, unknown_tx_hashes = tx_service.get_missing_transactions(short_ids)
    if has_missing:
        return unknown_tx_sids, unknown_tx_hashes, offset
    short_tx_index = 0
    output_offset = offset
    while offset < block_offsets.short_id_offset:
        if bx_block[offset] == ont_constants.ONT_SHORT_ID_INDICATOR:
            try:
                sid = short_ids[short_tx_index]
            except IndexError:
                raise message_conversion_error.btc_block_decompression_error(
                    block_hash,
                    f"Message is improperly formatted, short id index ({short_tx_index}) "
                    f"exceeded its array bounds (size: {len(short_ids)})"
                )
            tx_hash, tx, _ = tx_service.get_transaction(sid)
            offset += ont_constants.ONT_SHORT_ID_INDICATOR_LENGTH
            short_tx_index += 1
        else:
            tx_size = ont_messages_util.get_next_tx_size(bx_block, offset)
            tx = bx_block[offset:offset + tx_size]
            offset += tx_size

        assert tx is not None
        block_pieces.append(tx)
        output_offset += len(tx)

    # Add consensus payload tail and owner and signature to block_pieces
    offset = block_offsets.block_begin_offset + ont_constants.ONT_HASH_LEN + ont_constants.ONT_INT_LEN + 1
    payload_tail_len, = struct.unpack_from("<L", bx_block, offset)
    offset += ont_constants.ONT_INT_LEN
    block_pieces.append(bx_block[offset: offset + payload_tail_len])
    offset += payload_tail_len
    owner_and_signature_len, = struct.unpack_from("<L", bx_block, offset)
    offset += ont_constants.ONT_INT_LEN
    block_pieces.append(bx_block[offset: offset + owner_and_signature_len])
    offset += owner_and_signature_len

    return unknown_tx_sids, unknown_tx_hashes, output_offset
def parse_bx_block_transactions(
        block_hash: Sha256Hash,
        bx_block: memoryview,
        offset: int,
        short_ids: List[int],
        block_offsets: BlockOffsets,
        tx_service: TransactionService,
        block_pieces: Deque[Union[bytearray, memoryview]]
) -> Tuple[List[int], List[Sha256Hash], int]:
    has_missing, unknown_tx_sids, unknown_tx_hashes = \
        tx_service.get_missing_transactions(short_ids)
    if has_missing:
        return unknown_tx_sids, unknown_tx_hashes, offset
    short_tx_index = 0
    output_offset = offset
    while offset < block_offsets.short_id_offset:
        if bx_block[offset] == btc_constants.BTC_SHORT_ID_INDICATOR:
            try:
                sid = short_ids[short_tx_index]
            except IndexError:
                raise message_conversion_error.btc_block_decompression_error(
                    block_hash,
                    f"Message is improperly formatted, short id index ({short_tx_index}) "
                    f"exceeded its array bounds (size: {len(short_ids)})"
                )
            tx_hash, tx, _ = tx_service.get_transaction(sid)
            offset += btc_constants.BTC_SHORT_ID_INDICATOR_LENGTH
            short_tx_index += 1
        else:
            tx_size = btc_messages_util.get_next_tx_size(bx_block, offset)
            tx = bx_block[offset:offset + tx_size]
            offset += tx_size

        # pyre-fixme[6]: Expected `Union[bytearray, memoryview]` for 1st param but
        #  got `Optional[Union[bytearray, memoryview]]`.
        block_pieces.append(tx)
        # pyre-fixme[6]: Expected `Sized` for 1st param but got
        #  `Optional[Union[bytearray, memoryview]]`.
        output_offset += len(tx)

    return unknown_tx_sids, unknown_tx_hashes, output_offset
    def __init__(self, opts, node_ssl_service: Optional[NodeSSLService] = None):
        if opts.use_extensions:
            helpers.set_extensions_parallelism()
        if node_ssl_service is None:
            node_ssl_service = MockNodeSSLService(self.NODE_TYPE, MagicMock())
        super(MockOntGatewayNode, self).__init__(opts, node_ssl_service)
        self.requester = MagicMock()

        self.broadcast_messages = []
        self.broadcast_to_nodes_messages = []
        self._tx_service = TransactionService(self, 0)
        self.block_cleanup_service = self._get_cleanup_service()
        self.block_queuing_service = OntBlockQueuingService(self)
        self.message_converter = MockMessageConverter()
        if opts.use_extensions:
            from bxcommon.services.extension_transaction_service import ExtensionTransactionService
            self._tx_service = ExtensionTransactionService(self, self.network_num)
        else:
            self._tx_service = TransactionService(self, self.network_num)
        self.opts.has_fully_updated_tx_service = True
        self.has_active_blockchain_peer = MagicMock(return_value=True)
Example #12
    def __init__(self,
                 opts: CommonOpts,
                 node_ssl_service: Optional[NodeSSLService] = None) -> None:
        if node_ssl_service is None:
            node_ssl_service = MockNodeSSLService(self.NODE_TYPE, MagicMock())
        super(MockNode, self).__init__(opts, node_ssl_service)
        self.alarm_queue = AlarmQueue()
        self.network_num = DEFAULT_NETWORK_NUM

        self.broadcast_messages = []

        self._tx_service = TransactionService(self, self.network_num)
        self._tx_services = {}
    def init(self, use_extensions: bool):
        opts = Namespace()
        opts.use_extensions = use_extensions
        opts.import_extensions = use_extensions
        opts.tx_mem_pool_bucket_size = DEFAULT_TX_MEM_POOL_BUCKET_SIZE
        ont_message_converter = converter_factory.create_ont_message_converter(self.MAGIC, opts)
        if use_extensions:
            helpers.set_extensions_parallelism()
            tx_service = ExtensionTransactionService(MockNode(
                gateway_helpers.get_gateway_opts(8999)), 0)
        else:
            tx_service = TransactionService(MockNode(
                gateway_helpers.get_gateway_opts(8999)), 0)
        return tx_service, ont_message_converter
def contents_cleanup(transaction_service: TransactionService,
                     block_confirmation_message: AbstractCleanupMessage
                     ):
    message_hash = block_confirmation_message.message_hash()
    for short_id in block_confirmation_message.short_ids():
        transaction_service.remove_transaction_by_short_id(short_id, remove_related_short_ids=True)
    for tx_hash in block_confirmation_message.transaction_hashes():
        transaction_service.remove_transaction_by_tx_hash(tx_hash)
    transaction_service.on_block_cleaned_up(message_hash)
    logger.statistics(
        {
            "type": "MemoryCleanup",
            "event": "CacheStateAfterBlockCleanup",
            "message_hash": repr(message_hash),
            "data": transaction_service.get_cache_state_json()
        }
    )
    def init(self, use_extensions: bool):
        opts = Namespace()
        opts.use_extensions = use_extensions
        opts.import_extensions = use_extensions
        opts.tx_mem_pool_bucket_size = DEFAULT_TX_MEM_POOL_BUCKET_SIZE
        btc_message_converter = converter_factory.create_btc_message_converter(self.MAGIC, opts=opts)
        if use_extensions:
            helpers.set_extensions_parallelism()
            tx_service = ExtensionTransactionService(MockNode(
                gateway_helpers.get_gateway_opts(8999)), 0)
        else:
            tx_service = TransactionService(MockNode(
                gateway_helpers.get_gateway_opts(8999)), 0)
        if self.txns:
            for idx, txn in enumerate(self.txns):
                sha = btc_common_utils.get_txid(txn)
                if idx % 2 == 0:
                    tx_service.assign_short_id(sha, self.short_ids[int(idx / 2)])
                    tx_service.set_transaction_contents(sha, txn)
        return tx_service, btc_message_converter
    def clean_block_transactions_by_block_components(
            self, block_hash: Sha256Hash,
            transactions_list: Iterable[Sha256Hash],
            transaction_service: TransactionService) -> None:
        logger.debug("Processing block for cleanup: {}", block_hash)
        tx_hash_to_contents_len_before_cleanup = \
            transaction_service.get_tx_hash_to_contents_len()
        short_id_count_before_cleanup = \
            transaction_service.get_short_id_count()

        start_time = time.time()
        if not isinstance(transactions_list, list):
            transactions_list = list(transactions_list)
        sids = []
        confirmation_msg = BlockConfirmationMessage(
            block_hash,
            transaction_service.network_num,
            tx_hashes=transactions_list,
            sids=sids)
        self.contents_cleanup(transaction_service, confirmation_msg)
        self._block_hash_marked_for_cleanup.discard(block_hash)
        end_time = time.time()
        duration = end_time - start_time
        logger.debug(
            "Finished cleaning up block {}. Processed {} hashes. Took {:.3f}s.",
            block_hash, len(transactions_list), duration)
        self.node.post_block_cleanup_tasks(block_hash, sids, transactions_list)

        transactions_processed = len(transactions_list)
        tx_hash_to_contents_len_after_cleanup = \
            transaction_service.get_tx_hash_to_contents_len()
        short_id_count_after_cleanup = transaction_service.get_short_id_count()

        transaction_service.log_block_transaction_cleanup_stats(
            block_hash, transactions_processed,
            tx_hash_to_contents_len_before_cleanup,
            tx_hash_to_contents_len_after_cleanup,
            short_id_count_before_cleanup, short_id_count_after_cleanup)
    def compact_block_to_bx_block(
        self, compact_block: CompactBlockBtcMessage,
        transaction_service: TransactionService
    ) -> CompactBlockCompressionResult:
        """
         Handle decompression of Bitcoin compact block.
         Decompression converts compact block message to full block message.
         """
        compress_start_datetime = datetime.utcnow()
        block_header = compact_block.block_header()
        sha256_hash = hashlib.sha256()
        sha256_hash.update(block_header)
        sha256_hash.update(compact_block.short_nonce_buf())
        digest = sha256_hash.digest()
        key = digest[0:16]

        short_ids = compact_block.short_ids()

        short_id_to_tx_contents = {}

        for tx_hash in transaction_service.iter_transaction_hashes():
            tx_hash_binary = tx_hash.binary[::-1]
            tx_short_id = compute_short_id(key, tx_hash_binary)
            if tx_short_id in short_ids:
                tx_content = transaction_service.get_transaction_by_hash(
                    tx_hash)
                if tx_content is None:
                    logger.debug(
                        "Hash {} is known by transactions service but content is missing.",
                        tx_hash)
                else:
                    short_id_to_tx_contents[tx_short_id] = tx_content
            if len(short_id_to_tx_contents) == len(short_ids):
                break

        block_transactions = []
        missing_transactions_indices = []
        pre_filled_transactions = compact_block.pre_filled_transactions()
        total_txs_count = len(pre_filled_transactions) + len(short_ids)

        size = 0
        block_msg_parts = deque()

        block_msg_parts.append(block_header)
        size += len(block_header)

        tx_count_size = btc_messages_util.get_sizeof_btc_varint(
            total_txs_count)
        tx_count_buf = bytearray(tx_count_size)
        btc_messages_util.pack_int_to_btc_varint(total_txs_count, tx_count_buf,
                                                 0)
        block_msg_parts.append(tx_count_buf)
        size += tx_count_size

        short_ids_iter = iter(short_ids.keys())

        for index in range(total_txs_count):
            if index not in pre_filled_transactions:
                short_id = next(short_ids_iter)

                if short_id in short_id_to_tx_contents:
                    short_tx = short_id_to_tx_contents[short_id]
                    block_msg_parts.append(short_tx)
                    block_transactions.append(short_tx)
                    size += len(short_tx)
                else:
                    missing_transactions_indices.append(index)
                    block_transactions.append(None)
            else:
                pre_filled_transaction = pre_filled_transactions[index]
                block_msg_parts.append(pre_filled_transaction)
                block_transactions.append(pre_filled_transaction)
                size += len(pre_filled_transaction)

        recovered_item = CompactBlockRecoveryData(block_transactions,
                                                  block_header,
                                                  compact_block.magic(),
                                                  transaction_service)

        block_info = BlockInfo(compact_block.block_hash(), [],
                               compress_start_datetime,
                               compress_start_datetime, 0, None, None, None,
                               len(compact_block.rawbytes()), None, None, [])

        if len(missing_transactions_indices) > 0:
            recovery_index = self._last_recovery_idx
            self._last_recovery_idx += 1
            self._recovery_items[
                recovery_index] = recovered_item  # pyre-ignore
            return CompactBlockCompressionResult(False, block_info, None,
                                                 recovery_index,
                                                 missing_transactions_indices,
                                                 [])
        result = CompactBlockCompressionResult(False, block_info, None, None,
                                               [], [])
        return self._recovered_compact_block_to_bx_block(
            result, recovered_item)
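# Context sketch: the key derivation above matches BIP 152, where the first 16
# bytes of sha256(header || nonce) seed SipHash-2-4 and the short id is the low
# 6 bytes of the hash. A hedged outline, assuming a siphash_2_4(k0, k1, data)
# primitive (hypothetical name; compute_short_id presumably wraps something
# equivalent):
def bip152_short_id(key: bytes, tx_hash_le: bytes) -> int:
    k0, k1 = struct.unpack_from("<QQ", key)  # two little-endian 64-bit key halves
    return siphash_2_4(k0, k1, tx_hash_le) & 0xFFFFFFFFFFFF  # keep low 6 bytes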
Example #18
class SyncTxServiceTest(MessageFactoryTestCase):

    NETWORK_NUM = 12345

    def setUp(self) -> None:
        self.node = MockNode(helpers.get_common_opts(1234))

        self.network_num = 4
        self.transaction_service = TransactionService(self.node,
                                                      self.network_num)

    def get_message_factory(self):
        return bloxroute_message_factory

    def test_create_message_success_tx_service_sync_txs_msg(self):
        self._test_create_msg_success_tx_service_sync_with_tx_content_count(
            100)

    def test_create_message_success_tx_service_sync_txs_msg_with_exceeded_buf(
            self):
        self._test_create_msg_success_tx_service_sync_with_tx_content_count(
            1000)

    def _test_create_msg_success_tx_service_sync_with_tx_content_count(
            self, tx_content_count, sync_tx_content=True):
        short_ids = [
            list(range(1, 6)),
            list(range(11, 15)),
            list(range(53, 250)), [31],
            list(range(41, 48)), [51, 52]
        ]
        transaction_hashes = list(
            map(crypto.double_sha256, map(bytes, short_ids)))

        for i in range(len(short_ids)):
            transaction_content = bytearray(tx_content_count)
            transaction_content[:32] = transaction_hashes[i]
            transaction_key = self.transaction_service.get_transaction_key(
                transaction_hashes[i])
            self.transaction_service.set_transaction_contents_by_key(
                transaction_key, transaction_content)
            for short_id in short_ids[i]:
                self.transaction_service.assign_short_id_by_key(
                    transaction_key, short_id)

        # Six blocks received after
        for i in range(len(short_ids)):
            self.transaction_service.track_seen_short_ids(
                Sha256Hash(helpers.generate_bytearray(32)), short_ids[i])

        tx_service_snap = self.transaction_service.get_snapshot()

        txs_content_short_ids = tx_sync_service_helpers.create_txs_service_msg(
            self.transaction_service, tx_service_snap, sync_tx_content)

        if txs_content_short_ids:
            self._send_tx_msg(txs_content_short_ids, transaction_hashes)

    def _send_tx_msg(self, txs_content_short_ids, transaction_hashes):
        tx_service_sync_txs_msg: TxServiceSyncTxsMessage = \
            self.create_message_successfully(
                TxServiceSyncTxsMessage(
                    self.NETWORK_NUM, txs_content_short_ids
                ),
                TxServiceSyncTxsMessage
            )

        self.assertEqual(self.NETWORK_NUM,
                         tx_service_sync_txs_msg.network_num())
        self.assertEqual(len(txs_content_short_ids),
                         tx_service_sync_txs_msg.tx_count())
        tx_service_txs_content_short_ids = \
            tx_service_sync_txs_msg.txs_content_short_ids()
        tx_contents = [
            self.transaction_service.get_transaction(short_id).contents
            for tx_content_short_id in tx_service_txs_content_short_ids
            for short_id in tx_content_short_id.short_ids
        ]
        for tx_content_short_id in tx_service_txs_content_short_ids:
            self.assertIn(bytearray(tx_content_short_id.tx_hash),
                          transaction_hashes)
            self.assertIn(bytearray(tx_content_short_id.tx_content),
                          tx_contents)
            self.assertEqual(
                tx_content_short_id.short_ids,
                list(
                    self.transaction_service.get_short_ids_by_key(
                        self.transaction_service.get_transaction_key(
                            tx_content_short_id.tx_hash))))
Example #19
    def block_to_bx_block(
        self,
        block_msg: OntConsensusMessage,
        tx_service: TransactionService,
        enable_block_compression: bool,
        min_tx_age_seconds: float
    ) -> Tuple[memoryview, BlockInfo]:
        """
        Pack an Ontology consensus message's transactions into a bloXroute block.
        """
        consensus_msg = block_msg
        compress_start_datetime = datetime.utcnow()
        compress_start_timestamp = time.time()
        size = 0
        buf = deque()
        short_ids = []
        ignored_sids = []
        original_size = len(consensus_msg.rawbytes())

        consensus_payload_header = consensus_msg.consensus_payload_header()
        consensus_payload_header_len = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", consensus_payload_header_len, 0, len(consensus_payload_header))
        size += ont_constants.ONT_INT_LEN
        buf.append(consensus_payload_header_len)
        size += len(consensus_payload_header)
        buf.append(consensus_payload_header)
        consensus_data_type = bytearray(ont_constants.ONT_CHAR_LEN)
        struct.pack_into("<B", consensus_data_type, 0, consensus_msg.consensus_data_type())
        size += ont_constants.ONT_CHAR_LEN
        buf.append(consensus_data_type)
        consensus_data_len = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", consensus_data_len, 0, consensus_msg.consensus_data_len())
        size += ont_constants.ONT_INT_LEN
        buf.append(consensus_data_len)
        block_start_len = consensus_msg.block_start_len_memoryview()
        txn_header = consensus_msg.txn_header()
        block_start_len_and_txn_header_total_len = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", block_start_len_and_txn_header_total_len, 0, len(block_start_len) + len(txn_header))
        size += ont_constants.ONT_INT_LEN
        buf.append(block_start_len_and_txn_header_total_len)
        size += len(block_start_len)
        buf.append(block_start_len)
        size += len(txn_header)
        buf.append(txn_header)
        max_timestamp_for_compression = time.time() - min_tx_age_seconds

        for tx in consensus_msg.txns():
            tx_hash, _ = ont_messages_util.get_txid(tx)
            short_id = tx_service.get_short_id(tx_hash)
            short_id_assign_time = 0

            if short_id != constants.NULL_TX_SID:
                short_id_assign_time = tx_service.get_short_id_assign_time(short_id)

            if short_id == constants.NULL_TX_SID or \
                    not enable_block_compression or \
                    short_id_assign_time > max_timestamp_for_compression:
                if short_id != constants.NULL_TX_SID:
                    ignored_sids.append(short_id)
                buf.append(tx)
                size += len(tx)
            else:
                short_ids.append(short_id)
                buf.append(ont_constants.ONT_SHORT_ID_INDICATOR_AS_BYTEARRAY)
                size += 1

        # Prepend owner and signature, consensus payload tail, tx count and block hash to bx_block
        owner_and_signature = consensus_msg.owner_and_signature()
        owner_and_signature_len = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", owner_and_signature_len, 0, len(owner_and_signature))
        size += len(owner_and_signature)
        buf.appendleft(owner_and_signature)
        size += ont_constants.ONT_INT_LEN
        buf.appendleft(owner_and_signature_len)
        payload_tail = consensus_msg.payload_tail()
        payload_tail_len = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", payload_tail_len, 0, len(payload_tail))
        size += len(payload_tail)
        buf.appendleft(payload_tail)
        size += ont_constants.ONT_INT_LEN
        buf.appendleft(payload_tail_len)
        txn_count = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", txn_count, 0, consensus_msg.txn_count())
        size += ont_constants.ONT_INT_LEN
        buf.appendleft(txn_count)
        block_hash = consensus_msg.block_hash().binary
        size += ont_constants.ONT_HASH_LEN
        buf.appendleft(block_hash)

        is_consensus_msg_buf = struct.pack("?", True)
        buf.appendleft(is_consensus_msg_buf)
        size += 1

        block = finalize_block_bytes(buf, size, short_ids)

        prev_block_hash = convert.bytes_to_hex(consensus_msg.prev_block_hash().binary)
        bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))

        block_info = BlockInfo(
            consensus_msg.block_hash(),
            short_ids,
            compress_start_datetime,
            datetime.utcnow(),
            (time.time() - compress_start_timestamp) * 1000,
            consensus_msg.txn_count(),
            bx_block_hash,
            prev_block_hash,
            original_size,
            size,
            100 - float(size) / original_size * 100,
            ignored_sids
        )

        return memoryview(block), block_info
def _log_compressed_block_debug_info_eth(transaction_service: TransactionService,
                                         block_msg_bytes: Union[memoryview, bytearray]):
    is_block_relay = transaction_service.node.NODE_TYPE == NodeType.RELAY_BLOCK
    block_hash, short_ids, txs_bytes = _parse_block_eth(block_msg_bytes)

    # parse statistics variables
    short_tx_index = 0
    tx_start_index = 0

    tx_index_in_block = 0
    txs_info = []
    missing_short_ids = []

    while True:
        if tx_start_index >= len(txs_bytes):
            break

        short_id = 0
        has_contents = False
        assignment_time = 0

        _, tx_itm_len, tx_itm_start = rlp_utils.consume_length_prefix(txs_bytes, tx_start_index)
        tx_bytes = txs_bytes[tx_itm_start:tx_itm_start + tx_itm_len]

        is_full_tx_start = 0
        is_full_tx, is_full_tx_len, = rlp_utils.decode_int(tx_bytes, is_full_tx_start)

        _, tx_content_len, tx_content_start = rlp_utils.consume_length_prefix(
            tx_bytes, is_full_tx_start + is_full_tx_len)
        tx_content_bytes = tx_bytes[tx_content_start:tx_content_start + tx_content_len]

        if is_full_tx:
            tx_hash = Sha256Hash(eth_common_utils.keccak_hash(tx_content_bytes))
        else:
            short_id = short_ids[short_tx_index]
            tx_hash, tx_bytes, _ = transaction_service.get_transaction(short_id)
            has_contents = tx_bytes is not None
            if tx_hash is not None:
                assignment_time = transaction_service.get_short_id_assign_time(short_id)
            short_tx_index += 1

        if is_block_relay:
            txs_info.append((tx_index_in_block, not is_full_tx, short_id, tx_hash))
        else:
            txs_info.append((tx_index_in_block, not is_full_tx, short_id, tx_hash, has_contents, assignment_time))

        tx_index_in_block += 1
        tx_start_index = tx_itm_start + tx_itm_len

        if not is_full_tx and not has_contents:
            missing_short_ids.append(short_id)

    if is_block_relay:
        log_message = \
            "Block content (from block relay) {} from (index, is compressed, short id, hash is full) : {}"
    else:
        log_message = \
            "Block content (full) {} (index, compressed, short id, hash, has contents, assignment time) : {}"

    logger.debug(
        log_message,
        block_hash,
        ",".join(str(tx_info) for tx_info in txs_info)
    )

    node_type = transaction_service.node.NODE_TYPE
    assert node_type is not None
    log_can_decompress_block(node_type, block_hash, missing_short_ids)
    def block_to_bx_block(self, block_msg: BlockOntMessage, tx_service: TransactionService) -> \
            Tuple[memoryview, BlockInfo]:
        """
        Compresses an Ontology block's transactions and packs them into a bloXroute block.
        """
        compress_start_datetime = datetime.utcnow()
        compress_start_timestamp = time.time()
        size = 0
        buf = deque()
        short_ids = []
        header = block_msg.txn_header()
        size += len(header)
        buf.append(header)

        for tx in block_msg.txns():
            tx_hash, _ = ont_messages_util.get_txid(tx)
            short_id = tx_service.get_short_id(tx_hash)
            if short_id == constants.NULL_TX_SID:
                buf.append(tx)
                size += len(tx)
            else:
                short_ids.append(short_id)
                buf.append(ont_constants.ONT_SHORT_ID_INDICATOR_AS_BYTEARRAY)
                size += 1

        serialized_short_ids = compact_block_short_ids_serializer.serialize_short_ids_into_bytes(
            short_ids)
        buf.append(serialized_short_ids)
        size += constants.UL_ULL_SIZE_IN_BYTES

        merkle_root = block_msg.merkle_root()
        buf.appendleft(merkle_root)
        size += ont_constants.ONT_HASH_LEN

        is_consensus_msg_buf = struct.pack("?", False)
        buf.appendleft(is_consensus_msg_buf)
        size += 1

        offset_buf = struct.pack("<Q", size)
        buf.appendleft(offset_buf)
        size += len(serialized_short_ids)

        block = bytearray(size)
        off = 0
        for blob in buf:
            next_off = off + len(blob)
            block[off:next_off] = blob
            off = next_off

        prev_block_hash = convert.bytes_to_hex(
            block_msg.prev_block_hash().binary)
        bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))
        original_size = len(block_msg.rawbytes())

        block_info = BlockInfo(block_msg.block_hash(), short_ids,
                               compress_start_datetime, datetime.utcnow(),
                               (time.time() - compress_start_timestamp) * 1000,
                               block_msg.txn_count(), bx_block_hash,
                               prev_block_hash, original_size, size,
                               100 - float(size) / original_size * 100)
        return memoryview(block), block_info
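# A hedged reader-side sketch of the framing built above: the 8-byte "<Q"
# prefix records where the serialized short ids begin, so a consumer can split
# the bx_block without scanning the transaction stream (split_ont_bx_block is a
# hypothetical helper):
def split_ont_bx_block(bx_block: memoryview) -> Tuple[memoryview, memoryview]:
    short_ids_offset, = struct.unpack_from("<Q", bx_block, 0)
    compressed_payload = bx_block[:short_ids_offset]  # prefix, flag, merkle root, header, txs
    serialized_short_ids = bx_block[short_ids_offset:]
    return compressed_payload, serialized_short_ids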
Example #22
    def block_to_bx_block(
            self, block_msg: InternalEthBlockInfo,
            tx_service: TransactionService, enable_block_compression: bool,
            min_tx_age_seconds: float) -> Tuple[memoryview, BlockInfo]:
        """
        Convert an Ethereum new block message to the internal broadcast message,
        with transactions replaced by short ids where possible.

        The code is optimized and does not make copies of bytes.

        :param block_msg: Ethereum new block message
        :param tx_service: transactions service
        :param enable_block_compression: if False, all transactions are kept in full
        :param min_tx_age_seconds: minimum time since short id assignment before a
            transaction is eligible for compression
        :return: tuple of internal broadcast message bytes and block info
        """

        compress_start_datetime = datetime.datetime.utcnow()
        compress_start_timestamp = time.time()

        txs_bytes, block_hdr_full_bytes, remaining_bytes, prev_block_bytes = parse_block_message(
            block_msg)

        used_short_ids = []

        # creating transactions content
        content_size = 0
        buf = deque()
        ignored_sids = []

        tx_start_index = 0
        tx_count = 0
        original_size = len(block_msg.rawbytes())
        max_timestamp_for_compression = time.time() - min_tx_age_seconds

        while True:
            if tx_start_index >= len(txs_bytes):
                break

            _, tx_item_length, tx_item_start = rlp_utils.consume_length_prefix(
                txs_bytes, tx_start_index)
            tx_bytes = txs_bytes[tx_start_index:tx_item_start + tx_item_length]
            tx_hash_bytes = eth_common_utils.keccak_hash(tx_bytes)
            tx_hash = Sha256Hash(tx_hash_bytes)
            tx_key = tx_service.get_transaction_key(tx_hash)
            short_id = tx_service.get_short_id_by_key(tx_key)
            short_id_assign_time = 0

            if short_id != constants.NULL_TX_SID:
                short_id_assign_time = tx_service.get_short_id_assign_time(
                    short_id)

            if short_id <= constants.NULL_TX_SID or \
                    not enable_block_compression or short_id_assign_time > max_timestamp_for_compression:
                if short_id > constants.NULL_TX_SID:
                    ignored_sids.append(short_id)
                is_full_tx_bytes = rlp_utils.encode_int(1)
                tx_content_bytes = tx_bytes
            else:
                is_full_tx_bytes = rlp_utils.encode_int(0)
                used_short_ids.append(short_id)
                tx_content_bytes = bytes()

            tx_content_prefix = rlp_utils.get_length_prefix_str(
                len(tx_content_bytes))

            short_tx_content_size = len(is_full_tx_bytes) + len(
                tx_content_prefix) + len(tx_content_bytes)

            short_tx_content_prefix_bytes = rlp_utils.get_length_prefix_list(
                short_tx_content_size)

            buf.append(short_tx_content_prefix_bytes)
            buf.append(is_full_tx_bytes)
            buf.append(tx_content_prefix)
            buf.append(tx_content_bytes)

            content_size += len(
                short_tx_content_prefix_bytes) + short_tx_content_size

            tx_start_index = tx_item_start + tx_item_length

            tx_count += 1

        list_of_txs_prefix_bytes = rlp_utils.get_length_prefix_list(
            content_size)
        buf.appendleft(list_of_txs_prefix_bytes)
        content_size += len(list_of_txs_prefix_bytes)

        buf.appendleft(block_hdr_full_bytes)
        content_size += len(block_hdr_full_bytes)

        buf.append(remaining_bytes)
        content_size += len(remaining_bytes)

        compact_block_msg_prefix = rlp_utils.get_length_prefix_list(
            content_size)
        buf.appendleft(compact_block_msg_prefix)
        content_size += len(compact_block_msg_prefix)

        block = finalize_block_bytes(buf, content_size, used_short_ids)
        bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))

        block_info = BlockInfo(block_msg.block_hash(), used_short_ids,
                               compress_start_datetime,
                               datetime.datetime.utcnow(),
                               (time.time() - compress_start_timestamp) * 1000,
                               tx_count, bx_block_hash,
                               convert.bytes_to_hex(prev_block_bytes),
                               original_size, content_size,
                               100 - float(content_size) / original_size * 100,
                               ignored_sids)

        return memoryview(block), block_info
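# A hedged decoder sketch for one entry of the transaction stream built above,
# using the same rlp_utils helpers and mirroring the parsing in
# _log_compressed_block_debug_info_eth earlier in this section
# (read_short_tx_entry is a hypothetical helper): each entry is a
# length-prefixed pair of an is_full_tx flag and the (possibly empty) content.
def read_short_tx_entry(txs_bytes: memoryview, pos: int) -> Tuple[bool, memoryview, int]:
    _, item_len, item_start = rlp_utils.consume_length_prefix(txs_bytes, pos)
    entry = txs_bytes[item_start:item_start + item_len]
    is_full_tx, flag_len = rlp_utils.decode_int(entry, 0)
    _, content_len, content_start = rlp_utils.consume_length_prefix(entry, flag_len)
    tx_content = entry[content_start:content_start + content_len]  # empty when a short id was used
    return bool(is_full_tx), tx_content, item_start + item_len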
Example #23
    def block_to_bx_block(
            self, block_msg: OntConsensusMessage, tx_service: TransactionService
    ) -> Tuple[memoryview, BlockInfo]:
        """
        Compresses an Ontology consensus message's transactions and packs them into a bloXroute block.
        """
        consensus_msg = block_msg
        compress_start_datetime = datetime.utcnow()
        compress_start_timestamp = time.time()
        size = 0
        buf = deque()
        short_ids = []
        consensus_payload_header = consensus_msg.consensus_payload_header()
        consensus_payload_header_len = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", consensus_payload_header_len, 0, len(consensus_payload_header))
        size += ont_constants.ONT_INT_LEN
        buf.append(consensus_payload_header_len)
        size += len(consensus_payload_header)
        buf.append(consensus_payload_header)
        consensus_data_type = bytearray(ont_constants.ONT_CHAR_LEN)
        struct.pack_into("<B", consensus_data_type, 0, consensus_msg.consensus_data_type())
        size += ont_constants.ONT_CHAR_LEN
        buf.append(consensus_data_type)
        consensus_data_len = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", consensus_data_len, 0, consensus_msg.consensus_data_len())
        size += ont_constants.ONT_INT_LEN
        buf.append(consensus_data_len)
        block_start_len = consensus_msg.block_start_len_memoryview()
        txn_header = consensus_msg.txn_header()
        block_start_len_and_txn_header_total_len = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", block_start_len_and_txn_header_total_len, 0, len(block_start_len) + len(txn_header))
        size += ont_constants.ONT_INT_LEN
        buf.append(block_start_len_and_txn_header_total_len)
        size += len(block_start_len)
        buf.append(block_start_len)
        size += len(txn_header)
        buf.append(txn_header)

        for tx in consensus_msg.txns():
            tx_hash, _ = ont_messages_util.get_txid(tx)
            short_id = tx_service.get_short_id(tx_hash)
            if short_id == constants.NULL_TX_SID:
                buf.append(tx)
                size += len(tx)
            else:
                short_ids.append(short_id)
                buf.append(ont_constants.ONT_SHORT_ID_INDICATOR_AS_BYTEARRAY)
                size += 1

        # Prepend owner and signature, consensus payload tail, tx count and block hash to bx_block
        owner_and_signature = consensus_msg.owner_and_signature()
        owner_and_signature_len = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", owner_and_signature_len, 0, len(owner_and_signature))
        size += len(owner_and_signature)
        buf.appendleft(owner_and_signature)
        size += ont_constants.ONT_INT_LEN
        buf.appendleft(owner_and_signature_len)
        payload_tail = consensus_msg.payload_tail()
        payload_tail_len = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", payload_tail_len, 0, len(payload_tail))
        size += len(payload_tail)
        buf.appendleft(payload_tail)
        size += ont_constants.ONT_INT_LEN
        buf.appendleft(payload_tail_len)
        txn_count = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", txn_count, 0, consensus_msg.txn_count())
        size += ont_constants.ONT_INT_LEN
        buf.appendleft(txn_count)
        block_hash = consensus_msg.block_hash().binary
        size += ont_constants.ONT_HASH_LEN
        buf.appendleft(block_hash)

        is_consensus_msg_buf = struct.pack("?", True)
        buf.appendleft(is_consensus_msg_buf)
        size += 1

        serialized_short_ids = compact_block_short_ids_serializer.serialize_short_ids_into_bytes(short_ids)
        buf.append(serialized_short_ids)
        size += constants.UL_ULL_SIZE_IN_BYTES
        offset_buf = struct.pack("<Q", size)
        buf.appendleft(offset_buf)
        size += len(serialized_short_ids)

        block = bytearray(size)
        off = 0
        for blob in buf:
            next_off = off + len(blob)
            block[off:next_off] = blob
            off = next_off

        prev_block_hash = convert.bytes_to_hex(consensus_msg.prev_block_hash().binary)
        bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))
        original_size = len(consensus_msg.rawbytes())

        block_info = BlockInfo(
            consensus_msg.block_hash(),
            short_ids,
            compress_start_datetime,
            datetime.utcnow(),
            (time.time() - compress_start_timestamp) * 1000,
            consensus_msg.txn_count(),
            bx_block_hash,
            prev_block_hash,
            original_size,
            size,
            100 - float(size) / original_size * 100
        )
        return memoryview(block), block_info
Example #24
    def block_to_bx_block(
            self, block_msg: InternalEthBlockInfo,
            tx_service: TransactionService) -> Tuple[memoryview, BlockInfo]:
        """
        Convert an Ethereum new block message to the internal broadcast message,
        with transactions replaced by short ids where possible.

        The code is optimized and does not make copies of bytes.

        :param block_msg: Ethereum new block message
        :param tx_service: transactions service
        :return: tuple of internal broadcast message bytes and block info
        """

        compress_start_datetime = datetime.datetime.utcnow()
        compress_start_timestamp = time.time()
        msg_bytes = memoryview(block_msg.rawbytes())

        _, block_msg_itm_len, block_msg_itm_start = rlp_utils.consume_length_prefix(
            msg_bytes, 0)

        block_msg_bytes = msg_bytes[block_msg_itm_start:block_msg_itm_start +
                                    block_msg_itm_len]

        _, block_hdr_itm_len, block_hdr_itm_start = rlp_utils.consume_length_prefix(
            block_msg_bytes, 0)
        block_hdr_full_bytes = block_msg_bytes[0:block_hdr_itm_start +
                                               block_hdr_itm_len]
        block_hdr_bytes = block_msg_bytes[
            block_hdr_itm_start:block_hdr_itm_start + block_hdr_itm_len]

        _, prev_block_itm_len, prev_block_itm_start = rlp_utils.consume_length_prefix(
            block_hdr_bytes, 0)
        prev_block_bytes = block_hdr_bytes[
            prev_block_itm_start:prev_block_itm_start + prev_block_itm_len]

        _, txs_itm_len, txs_itm_start = rlp_utils.consume_length_prefix(
            block_msg_bytes, block_hdr_itm_start + block_hdr_itm_len)
        txs_bytes = block_msg_bytes[txs_itm_start:txs_itm_start + txs_itm_len]

        remaining_bytes = block_msg_bytes[txs_itm_start + txs_itm_len:]

        used_short_ids = []

        # creating transactions content
        content_size = 0
        buf = deque()

        tx_start_index = 0
        tx_count = 0

        while True:
            if tx_start_index >= len(txs_bytes):
                break

            _, tx_item_length, tx_item_start = rlp_utils.consume_length_prefix(
                txs_bytes, tx_start_index)
            tx_bytes = txs_bytes[tx_start_index:tx_item_start + tx_item_length]
            tx_hash_bytes = eth_common_utils.keccak_hash(tx_bytes)
            tx_hash = Sha256Hash(tx_hash_bytes)
            short_id = tx_service.get_short_id(tx_hash)

            if short_id <= 0:
                is_full_tx_bytes = rlp_utils.encode_int(1)
                tx_content_bytes = tx_bytes
            else:
                is_full_tx_bytes = rlp_utils.encode_int(0)
                used_short_ids.append(short_id)
                tx_content_bytes = bytes()

            tx_content_prefix = rlp_utils.get_length_prefix_str(
                len(tx_content_bytes))

            short_tx_content_size = len(is_full_tx_bytes) + len(
                tx_content_prefix) + len(tx_content_bytes)

            short_tx_content_prefix_bytes = rlp_utils.get_length_prefix_list(
                short_tx_content_size)

            buf.append(short_tx_content_prefix_bytes)
            buf.append(is_full_tx_bytes)
            buf.append(tx_content_prefix)
            buf.append(tx_content_bytes)

            content_size += len(
                short_tx_content_prefix_bytes) + short_tx_content_size

            tx_start_index = tx_item_start + tx_item_length

            tx_count += 1

        list_of_txs_prefix_bytes = rlp_utils.get_length_prefix_list(
            content_size)
        buf.appendleft(list_of_txs_prefix_bytes)
        content_size += len(list_of_txs_prefix_bytes)

        buf.appendleft(block_hdr_full_bytes)
        content_size += len(block_hdr_full_bytes)

        buf.append(remaining_bytes)
        content_size += len(remaining_bytes)

        compact_block_msg_prefix = rlp_utils.get_length_prefix_list(
            content_size)
        buf.appendleft(compact_block_msg_prefix)
        content_size += len(compact_block_msg_prefix)

        short_ids_bytes = compact_block_short_ids_serializer.serialize_short_ids_into_bytes(
            used_short_ids)
        buf.append(short_ids_bytes)
        content_size += constants.UL_ULL_SIZE_IN_BYTES
        offset_buf = struct.pack("<Q", content_size)
        buf.appendleft(offset_buf)
        content_size += len(short_ids_bytes)

        # Parse it into the bloXroute message format and send it along
        block = bytearray(content_size)
        off = 0
        for blob in buf:
            next_off = off + len(blob)
            block[off:next_off] = blob
            off = next_off

        bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))
        original_size = len(block_msg.rawbytes())

        block_info = BlockInfo(block_msg.block_hash(), used_short_ids,
                               compress_start_datetime,
                               datetime.datetime.utcnow(),
                               (time.time() - compress_start_timestamp) * 1000,
                               tx_count, bx_block_hash,
                               convert.bytes_to_hex(prev_block_bytes),
                               original_size, content_size,
                               100 - float(content_size) / original_size * 100)
        return memoryview(block), block_info
Example #25
class SyncTxServiceTest(MessageFactoryTestCase):

    NETWORK_NUM = 12345

    def setUp(self) -> None:
        self.node = MockNode(helpers.get_common_opts(1234))

        self.network_num = 4
        self.transaction_service = TransactionService(self.node,
                                                      self.network_num)

    def _add_transactions(self, tx_count, tx_size, short_id_offset=0):
        short_id = short_id_offset
        for i in range(int(tx_count)):
            tx_hash = Sha256Hash(
                binary=helpers.generate_bytearray(crypto.SHA256_HASH_LEN))
            tx_content = helpers.generate_bytearray(tx_size)
            transaction_key = self.transaction_service.get_transaction_key(
                tx_hash)
            self.transaction_service.set_transaction_contents_by_key(
                transaction_key, tx_content)
            for _ in range(random.randrange(1, 10)):
                short_id += 1
                self.transaction_service.assign_short_id(tx_hash, short_id)
                self.transaction_service.set_short_id_transaction_type(
                    short_id, TransactionFlag.PAID_TX)
                if short_id % 7 < 2:
                    self.transaction_service._short_id_to_tx_cache_key.pop(
                        short_id, None)

    @skip("We don't sync tx service using time")
    def test_create_tx_service_msg(self):
        self._add_transactions(100000, tx_size=50)
        done = False
        msgs = []
        timestamp = 0
        snapshot_cache_keys = None
        total_time = 0
        total_txs = 0
        while not done:
            start_ = time.time()
            txs_content_short_ids, timestamp, done, snapshot_cache_keys = \
                tx_sync_service_helpers.create_txs_service_msg_from_time(
                    self.transaction_service,
                    timestamp,
                    False,
                    snapshot_cache_keys
                )
            duration = time.time() - start_
            total_time += duration
            total_txs += len(txs_content_short_ids)
            msgs.append(txs_content_short_ids)
            # print(f"txs:{len(txs_content_short_ids)}, time: {duration}")
        print(f"total - msgs:{len(msgs)}, time:{total_time}")
        msg_build_time = 0
        for txs_content_short_ids in msgs:
            start_ = time.time()
            msg = TxServiceSyncTxsMessage(self.network_num,
                                          txs_content_short_ids)
            duration = time.time() - start_
            msg_build_time += duration
        print(f"total - message creation time: {msg_build_time}")
        self.assertTrue(True)

    @skip("We don't sync tx service using snapshot")
    def test_create_tx_service_msg_snapshot(self):
        self._add_transactions(100000, tx_size=50)
        total_time = 0
        start_ = time.time()
        snapshot = self.transaction_service.get_snapshot(0)
        duration = time.time() - start_
        print(f"snapshot creation: {duration}")
        total_time += duration
        msgs = []

        while snapshot:
            start_ = time.time()
            txs_content_short_ids = tx_sync_service_helpers.create_txs_service_msg(
                self.transaction_service, snapshot, sync_tx_content=False)
            msgs.append(txs_content_short_ids)
            duration = time.time() - start_
            total_time += duration
            # print(len(txs_content_short_ids), duration)
        print(f"total time: {total_time}")

    @skip("We don't sync tx service using snapshot")
    def test_create_tx_service_msg_snapshot_by_time(self):
        self._add_transactions(100000, tx_size=50)
        total_time = 0
        start_ = time.time()
        snapshot = self.transaction_service.get_snapshot(1.0)
        duration = time.time() - start_
        print(f"snapshot creation: {duration}")
        total_time += duration
        msgs = []

        while snapshot:
            start_ = time.time()
            txs_content_short_ids = tx_sync_service_helpers.create_txs_service_msg(
                self.transaction_service, snapshot, sync_tx_content=False)
            msgs.append(txs_content_short_ids)
            duration = time.time() - start_
            total_time += duration
            # print(len(txs_content_short_ids), duration)
        print(f"total time: {total_time}")
    def _get_transaction_service(self) -> TransactionService:
        return TransactionService(self.mock_node, 0)
Example #27
    def setUp(self) -> None:
        self.node = MockNode(helpers.get_common_opts(1234))

        self.network_num = 4
        self.transaction_service = TransactionService(self.node,
                                                      self.network_num)