def block_to_bx_block(self, block_msg: BlockOntMessage, tx_service: ExtensionTransactionService) -> \
            Tuple[memoryview, BlockInfo]:
        compress_start_datetime = datetime.utcnow()
        compress_start_timestamp = time.time()
        self._default_block_size = max(self._default_block_size,
                                       len(block_msg.buf))
        tsk = self.compression_tasks.borrow_task()
        tsk.init(tpe.InputBytes(block_msg.buf), tx_service.proxy)
        try:
            task_pool_proxy.run_task(tsk)
        except tpe.AggregatedException as e:
            self.compression_tasks.return_task(tsk)
            raise message_conversion_error.btc_block_compression_error(
                block_msg.block_hash(), e)
        bx_block = tsk.bx_block()
        block = memoryview(bx_block)
        compressed_size = len(block)
        original_size = len(block_msg.rawbytes())
        block_hash = OntObjectHash(
            binary=convert.hex_to_bytes(tsk.block_hash().hex_string()))

        block_info = BlockInfo(
            block_hash, tsk.short_ids(), compress_start_datetime,
            datetime.utcnow(), (time.time() - compress_start_timestamp) * 1000,
            tsk.txn_count(),
            tsk.compressed_block_hash().hex_string(),
            tsk.prev_block_hash().hex_string(), original_size, compressed_size,
            100 - float(compressed_size) / original_size * 100)
        self.compression_tasks.return_task(tsk)
        return block, block_info
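A note on the borrow/run/return lifecycle above: the task is returned to the pool in the except branch and again after BlockInfo is built, but if tsk.bx_block() or the BlockInfo construction itself raised, the borrowed task would never be returned. A minimal sketch of the same flow with try/finally, assuming the compression_tasks pool, task_pool_proxy, and tpe APIs shown above:

        tsk = self.compression_tasks.borrow_task()
        tsk.init(tpe.InputBytes(block_msg.buf), tx_service.proxy)
        try:
            task_pool_proxy.run_task(tsk)
            block = memoryview(tsk.bx_block())
        except tpe.AggregatedException as e:
            raise message_conversion_error.btc_block_compression_error(
                block_msg.block_hash(), e)
        finally:
            # return_task runs on success and on every error path
            self.compression_tasks.return_task(tsk)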
Example #2
    def msg_block(self, msg: BlockOntMessage) -> None:
        block_stats.add_block_event_by_block_hash(
            msg.block_hash(),
            BlockStatEventType.REMOTE_BLOCK_RECEIVED_BY_GATEWAY,
            network_num=self.connection.network_num,
            more_info="Protocol: {}, Network: {}".format(
                self.connection.node.opts.blockchain_protocol,
                self.connection.node.opts.blockchain_network))
        return self.msg_proxy_response(msg)
Example #3
    def test_ont_block_to_bloxroute_block_and_back_sids_found(self):

        prev_block_hash = bytearray(crypto.bitcoin_hash(b"123"))
        prev_block = OntObjectHash(prev_block_hash, length=SHA256_HASH_LEN)
        merkle_root_hash = bytearray(crypto.bitcoin_hash(b"234"))
        merkle_root = OntObjectHash(merkle_root_hash, length=SHA256_HASH_LEN)
        txns_root_hash = bytearray(crypto.bitcoin_hash(b"345"))
        txns_root = OntObjectHash(txns_root_hash, length=SHA256_HASH_LEN)
        block_root_hash = bytearray(crypto.bitcoin_hash(b"456"))
        block_root = OntObjectHash(block_root_hash, length=SHA256_HASH_LEN)
        consensus_payload = bytes(b'111')
        next_bookkeeper = bytes(b'222')
        bookkeepers = [bytes(33)] * 5
        sig_data = [bytes(2)] * 3
        txns = []
        timestamp = 1
        height = 2
        consensus_data = 3

        ont_block = BlockOntMessage(self.magic, self.version, prev_block,
                                    txns_root, block_root, timestamp, height,
                                    consensus_data, consensus_payload,
                                    next_bookkeeper, bookkeepers, sig_data,
                                    txns, merkle_root)
        block_hash = ont_block.block_hash()
        bloxroute_block, block_info = self.ont_message_converter.block_to_bx_block(
            ont_block, self.tx_service, True, 0)
        self.assertEqual(0, block_info.txn_count)
        self.assertEqual(self.short_ids, list(block_info.short_ids))
        self.assertEqual(ont_block.block_hash(), block_info.block_hash)

        parsed_ont_block, block_info, _, _ = self.ont_message_converter.bx_block_to_block(
            bloxroute_block, self.tx_service)
        self.assertIsNotNone(block_info)
        self.assertEqual(ont_block.rawbytes().tobytes(),
                         parsed_ont_block.rawbytes().tobytes())
        self.assertEqual(self.magic, parsed_ont_block.magic())
        self.assertEqual(
            prev_block_hash,
            parsed_ont_block.prev_block_hash().get_little_endian())
        self.assertEqual(ont_block.checksum(), parsed_ont_block.checksum())
        self.assertEqual(block_hash, parsed_ont_block.block_hash())
        self.assertEqual(block_hash.binary, block_info.block_hash.binary)
        self.assertEqual(timestamp, parsed_ont_block.timestamp())
Example #4
    def block_to_bx_block(self, block_msg: BlockOntMessage, tx_service: TransactionService) \
            -> Tuple[memoryview, BlockInfo]:
        start_datetime = datetime.utcnow()
        start_time = time.time()

        block_info = BlockInfo(
            block_msg.block_hash(),
            [],
            start_datetime,
            datetime.utcnow(),
            (time.time() - start_time) * 1000,
            block_msg.txn_count(),
            str(block_msg.block_hash()),
            convert.bytes_to_hex(block_msg.prev_block_hash().binary),
            len(block_msg.rawbytes()),
            len(block_msg.rawbytes()),
            0
        )
        return block_msg.rawbytes(), block_info
Example #5
    def bx_block_to_block(self, bx_block_msg: memoryview, tx_service: TransactionService) -> BlockDecompressionResult:
        start_datetime = datetime.utcnow()
        start_time = time.time()

        block_msg = BlockOntMessage(buf=bytearray(bx_block_msg))

        block_info = BlockInfo(
            block_msg.block_hash(),
            [],
            start_datetime,
            datetime.utcnow(),
            (time.time() - start_time) * 1000,
            block_msg.txn_count(),
            str(block_msg.block_hash()),
            convert.bytes_to_hex(block_msg.prev_block_hash().binary),
            len(block_msg.rawbytes()),
            len(block_msg.rawbytes()),
            0
        )
        return BlockDecompressionResult(block_msg, block_info, [], [])
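Examples #4 and #5 are passthrough converters that form an identity round trip: block_to_bx_block hands back the raw block bytes unchanged and bx_block_to_block re-parses them, so original and compressed sizes are equal and the compression rate is 0. A short usage sketch, where converter is a hypothetical instance exposing both methods, block_msg and tx_service are an existing BlockOntMessage and TransactionService, and BlockDecompressionResult is assumed to expose the parsed message as block_msg:

    bx_bytes, info = converter.block_to_bx_block(block_msg, tx_service)
    assert info.original_size == info.compressed_size  # passthrough, no compression
    result = converter.bx_block_to_block(bx_bytes, tx_service)
    assert result.block_msg.rawbytes().tobytes() == block_msg.rawbytes().tobytes()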
Example #6
    def clean_block_transactions(
            self, block_msg: BlockOntMessage,
            transaction_service: TransactionService) -> None:
        block_short_ids = []
        block_unknown_tx_hashes = []
        start_time = time.time()

        short_ids_count = 0
        unknown_tx_hashes_count = 0
        transactions_processed = 0

        tx_hash_to_contents_len_before_cleanup = \
            transaction_service.get_tx_hash_to_contents_len()
        short_id_count_before_cleanup = \
            transaction_service.get_short_id_count()

        for tx in block_msg.txns():
            tx_hash = OntObjectHash(buf=crypto.double_sha256(tx),
                                    length=ont_constants.ONT_HASH_LEN)
            short_ids = transaction_service.remove_transaction_by_tx_hash(
                tx_hash, force=True)
            if short_ids is None:
                unknown_tx_hashes_count += 1
                block_unknown_tx_hashes.append(tx_hash)
            else:
                short_ids_count += len(short_ids)
                block_short_ids.extend(short_ids)
            transactions_processed += 1
        block_hash = block_msg.block_hash()
        transaction_service.on_block_cleaned_up(block_hash)
        end_time = time.time()
        duration = end_time - start_time
        tx_hash_to_contents_len_after_cleanup = \
            transaction_service.get_tx_hash_to_contents_len()
        short_id_count_after_cleanup = transaction_service.get_short_id_count()

        logger.debug(
            "Finished cleaning up block {}. Processed {} hashes, {} of which were unknown, and cleaned up {} "
            "short ids. Took {:.3f}s.", block_hash, transactions_processed,
            unknown_tx_hashes_count, short_ids_count, duration)

        transaction_service.log_block_transaction_cleanup_stats(
            block_hash, block_msg.txn_count(),
            tx_hash_to_contents_len_before_cleanup,
            tx_hash_to_contents_len_after_cleanup,
            short_id_count_before_cleanup, short_id_count_after_cleanup)

        self._block_hash_marked_for_cleanup.discard(block_hash)
        self.node.post_block_cleanup_tasks(
            block_hash=block_hash,
            short_ids=block_short_ids,
            unknown_tx_hashes=block_unknown_tx_hashes)
Example #7
def parse_bx_block_header(bx_block: memoryview, block_pieces: Deque[Union[bytearray, memoryview]]) -> \
        BlockHeaderInfo:
    block_offsets = compact_block_short_ids_serializer.get_bx_block_offsets(
        bx_block)
    short_ids, short_ids_len = compact_block_short_ids_serializer.deserialize_short_ids_from_buffer(
        bx_block, block_offsets.short_id_offset)

    reconstructed_block_message = BlockOntMessage(
        buf=bx_block[block_offsets.block_begin_offset +
                     ont_constants.ONT_HASH_LEN + 1:])
    block_hash = reconstructed_block_message.block_hash()
    txn_count = reconstructed_block_message.txn_count()
    offset = (reconstructed_block_message.txn_offset() +
              block_offsets.block_begin_offset + ont_constants.ONT_HASH_LEN + 1)

    # Add header piece
    block_pieces.append(bx_block[block_offsets.block_begin_offset +
                                 ont_constants.ONT_HASH_LEN + 1:offset])
    return BlockHeaderInfo(block_offsets, short_ids, short_ids_len, block_hash,
                           offset, txn_count)
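The expression block_offsets.block_begin_offset + ont_constants.ONT_HASH_LEN + 1, used three times above, skips the 32-byte merkle root and the 1-byte is-consensus flag that the compression side (the final block_to_bx_block below) prepends ahead of the raw block payload. A hypothetical helper naming that arithmetic, under the same layout assumption:

def _block_payload_start(block_begin_offset: int) -> int:
    # Skip the 1-byte is-consensus flag and the 32-byte merkle root that sit
    # between the bx block prefix and the raw Ontology block bytes.
    return block_begin_offset + ont_constants.ONT_HASH_LEN + 1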
Example #8
    def msg_block(self, msg: BlockOntMessage) -> None:
        block_hash = msg.block_hash()

        if not self.node.should_process_block_hash(block_hash):
            return

        if self.node.block_cleanup_service.is_marked_for_cleanup(block_hash):
            self.connection.log_trace("Marked block for cleanup: {}",
                                      block_hash)
            self.node.block_cleanup_service.clean_block_transactions(
                transaction_service=self.node.get_tx_service(), block_msg=msg)
        else:
            self.process_msg_block(msg)

        # After receiving a block message, send an INV message for the same block to the
        # Ontology node. This is needed to update the Synced Headers value of the gateway
        # peer on the Ontology node. If Synced Headers is not up to date, the Ontology
        # node does not push compact blocks to the gateway.
        inv_msg = InvOntMessage(magic=self.node.opts.blockchain_net_magic,
                                inv_type=InventoryOntType.MSG_BLOCK,
                                blocks=[block_hash])
        self.connection.enqueue_msg(inv_msg)
        self.node.update_current_block_height(msg.height(), block_hash)
Example #9
    def clean_block_transactions(
            self, block_msg: BlockOntMessage,
            transaction_service: TransactionService) -> None:
        start_datetime = datetime.utcnow()
        start_time = time.time()
        tx_hash_to_contents_len_before_cleanup = \
            transaction_service.get_tx_hash_to_contents_len()
        cleanup_task = self.block_cleanup_tasks.borrow_task()
        tx_service = typing.cast(ExtensionTransactionService,
                                 transaction_service)
        cleanup_task.init(tpe.InputBytes(block_msg.buf), tx_service.proxy)
        init_time = time.time()
        task_pool_proxy.run_task(cleanup_task)
        task_run_time = time.time()
        unknown_tx_hashes_count = len(cleanup_task.unknown_tx_hashes())
        tx_property_fetch_time = time.time()
        short_ids = cleanup_task.short_ids()
        short_ids_fetch_time = time.time()
        short_ids_count = len(short_ids)
        tx_service.update_removed_transactions(
            cleanup_task.total_content_removed(), short_ids)
        remove_from_tx_service_time = time.time()
        # TODO : clean the short ids/transactions from the alarm queue after refactoring the transaction service
        block_hash = block_msg.block_hash()
        tx_service.on_block_cleaned_up(block_hash)
        tx_hash_to_contents_len_after_cleanup = \
            transaction_service.get_tx_hash_to_contents_len()
        end_datetime = datetime.utcnow()
        end_time = time.time()

        logger.statistics({
            "type": "BlockTransactionsCleanup",
            "block_hash": repr(block_hash),
            "unknown_tx_hashes_count": unknown_tx_hashes_count,
            "short_ids_count": short_ids_count,
            "block_transactions_count": cleanup_task.txn_count(),
            "start_datetime": start_datetime,
            "end_datetime": end_datetime,
            "task_init_time": init_time - start_time,
            "task_run_time": task_run_time - init_time,
            "tx_property_fetch_time": tx_property_fetch_time - task_run_time,
            "short_ids_fetch_time": short_ids_fetch_time - tx_property_fetch_time,
            "remove_from_tx_service_time": remove_from_tx_service_time - short_ids_fetch_time,
            "duration": end_time - start_time,
            "tx_hash_to_contents_len_before_cleanup": tx_hash_to_contents_len_before_cleanup,
            "tx_hash_to_contents_len_after_cleanup": tx_hash_to_contents_len_after_cleanup,
        })
        self.block_cleanup_tasks.return_task(cleanup_task)
        self._block_hash_marked_for_cleanup.discard(block_hash)
        self.node.post_block_cleanup_tasks(
            block_hash=block_hash,
            short_ids=short_ids,
            unknown_tx_hashes=(
                Sha256Hash(convert.hex_to_bytes(tx_hash.hex_string()))
                for tx_hash in cleanup_task.unknown_tx_hashes()))
Example #10
    def block_to_bx_block(self, block_msg: BlockOntMessage, tx_service: TransactionService) -> \
            Tuple[memoryview, BlockInfo]:
        """
        Compresses an Ontology block's transactions and packs them into a bloXroute block.
        """
        compress_start_datetime = datetime.utcnow()
        compress_start_timestamp = time.time()
        size = 0
        buf = deque()
        short_ids = []
        header = block_msg.txn_header()
        size += len(header)
        buf.append(header)

        for tx in block_msg.txns():
            tx_hash, _ = ont_messages_util.get_txid(tx)
            short_id = tx_service.get_short_id(tx_hash)
            if short_id == constants.NULL_TX_SID:
                buf.append(tx)
                size += len(tx)
            else:
                short_ids.append(short_id)
                buf.append(ont_constants.ONT_SHORT_ID_INDICATOR_AS_BYTEARRAY)
                size += 1

        serialized_short_ids = compact_block_short_ids_serializer.serialize_short_ids_into_bytes(
            short_ids)
        buf.append(serialized_short_ids)
        # Accounts for the 8-byte offset field prepended below; the length of the
        # serialized short ids themselves is added to size only after that offset
        # value has been packed.
        size += constants.UL_ULL_SIZE_IN_BYTES

        merkle_root = block_msg.merkle_root()
        buf.appendleft(merkle_root)
        size += ont_constants.ONT_HASH_LEN

        is_consensus_msg_buf = struct.pack("?", False)
        buf.appendleft(is_consensus_msg_buf)
        size += 1

        offset_buf = struct.pack("<Q", size)
        buf.appendleft(offset_buf)
        size += len(serialized_short_ids)

        block = bytearray(size)
        off = 0
        for blob in buf:
            next_off = off + len(blob)
            block[off:next_off] = blob
            off = next_off

        prev_block_hash = convert.bytes_to_hex(
            block_msg.prev_block_hash().binary)
        bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))
        original_size = len(block_msg.rawbytes())

        block_info = BlockInfo(block_msg.block_hash(), short_ids,
                               compress_start_datetime, datetime.utcnow(),
                               (time.time() - compress_start_timestamp) * 1000,
                               block_msg.txn_count(), bx_block_hash,
                               prev_block_hash, original_size, size,
                               100 - float(size) / original_size * 100)
        return memoryview(block), block_info
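Reading the append/appendleft calls off in order, the compressed buffer returned above is laid out front to back as follows (a summary of this function's own output, not a separate wire spec):

# [0:8]    <Q little-endian offset of the serialized short ids section
# [8:9]    "?" is-consensus flag (False for blocks)
# [9:41]   merkle root (ONT_HASH_LEN bytes)
# [41:N]   txn header, then each transaction either verbatim or replaced by the
#          1-byte ONT_SHORT_ID_INDICATOR when a short id was known
# [N:end]  serialized short ids; the packed offset equals N because size already
#          counted the 8-byte offset field when the offset value was packed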