    def block_to_bx_block(self, block_msg: BlockOntMessage, tx_service: ExtensionTransactionService) -> \
            Tuple[memoryview, BlockInfo]:
        compress_start_datetime = datetime.utcnow()
        compress_start_timestamp = time.time()
        self._default_block_size = max(self._default_block_size,
                                       len(block_msg.buf))
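        # Delegate compression to the extension task pool: borrow a pooled task,
        # feed it the raw block bytes and the transaction service proxy, and run it.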
        tsk = self.compression_tasks.borrow_task()
        tsk.init(tpe.InputBytes(block_msg.buf), tx_service.proxy)
        try:
            task_pool_proxy.run_task(tsk)
        except tpe.AggregatedException as e:
            self.compression_tasks.return_task(tsk)
            raise message_conversion_error.btc_block_compression_error(
                block_msg.block_hash(), e)
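        # Collect the compressed block and its metadata from the completed task.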
        bx_block = tsk.bx_block()
        block = memoryview(bx_block)
        compressed_size = len(block)
        original_size = len(block_msg.rawbytes())
        block_hash = OntObjectHash(
            binary=convert.hex_to_bytes(tsk.block_hash().hex_string()))

        block_info = BlockInfo(
            block_hash, tsk.short_ids(), compress_start_datetime,
            datetime.utcnow(), (time.time() - compress_start_timestamp) * 1000,
            tsk.txn_count(),
            tsk.compressed_block_hash().hex_string(),
            tsk.prev_block_hash().hex_string(), original_size, compressed_size,
            100 - float(compressed_size) / original_size * 100)
        self.compression_tasks.return_task(tsk)
        return block, block_info
Example #2
    def block_to_bx_block(self, block_msg: BlockOntMessage, tx_service: TransactionService) \
            -> Tuple[memoryview, BlockInfo]:
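        # Pass-through conversion: the block is forwarded unmodified, so no short IDs
        # are collected and the reported compressed size equals the original size.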
        start_datetime = datetime.utcnow()
        start_time = time.time()

        block_info = BlockInfo(
            block_msg.block_hash(),
            [],
            start_datetime,
            datetime.utcnow(),
            (time.time() - start_time) * 1000,
            block_msg.txn_count(),
            str(block_msg.block_hash()),
            convert.bytes_to_hex(block_msg.prev_block_hash().binary),
            len(block_msg.rawbytes()),
            len(block_msg.rawbytes()),
            0
        )
        return block_msg.rawbytes(), block_info

    def test_ont_block_to_bloxroute_block_and_back_sids_found(self):

        prev_block_hash = bytearray(crypto.bitcoin_hash(b"123"))
        prev_block = OntObjectHash(prev_block_hash, length=SHA256_HASH_LEN)
        merkle_root_hash = bytearray(crypto.bitcoin_hash(b"234"))
        merkle_root = OntObjectHash(merkle_root_hash, length=SHA256_HASH_LEN)
        txns_root_hash = bytearray(crypto.bitcoin_hash(b"345"))
        txns_root = OntObjectHash(txns_root_hash, length=SHA256_HASH_LEN)
        block_root_hash = bytearray(crypto.bitcoin_hash(b"456"))
        block_root = OntObjectHash(block_root_hash, length=SHA256_HASH_LEN)
        consensus_payload = bytes(b'111')
        next_bookkeeper = bytes(b'222')
        bookkeepers = [bytes(33)] * 5
        sig_data = [bytes(2)] * 3
        txns = []
        timestamp = 1
        height = 2
        consensus_data = 3

        ont_block = BlockOntMessage(self.magic, self.version, prev_block,
                                    txns_root, block_root, timestamp, height,
                                    consensus_data, consensus_payload,
                                    next_bookkeeper, bookkeepers, sig_data,
                                    txns, merkle_root)
        block_hash = ont_block.block_hash()
        bloxroute_block, block_info = self.ont_message_converter.block_to_bx_block(
            ont_block, self.tx_service, True, 0)
        self.assertEqual(0, block_info.txn_count)
        self.assertEqual(self.short_ids, list(block_info.short_ids))
        self.assertEqual(ont_block.block_hash(), block_info.block_hash)

        parsed_ont_block, block_info, _, _ = self.ont_message_converter.bx_block_to_block(
            bloxroute_block, self.tx_service)
        self.assertIsNotNone(block_info)
        self.assertEqual(ont_block.rawbytes().tobytes(),
                         parsed_ont_block.rawbytes().tobytes())
        self.assertEqual(self.magic, parsed_ont_block.magic())
        self.assertEqual(
            prev_block_hash,
            parsed_ont_block.prev_block_hash().get_little_endian())
        self.assertEqual(ont_block.checksum(), parsed_ont_block.checksum())
        self.assertEqual(block_hash, parsed_ont_block.block_hash())
        self.assertEqual(block_hash.binary, block_info.block_hash.binary)
        self.assertEqual(timestamp, parsed_ont_block.timestamp())
Example #4
    def bx_block_to_block(self, bx_block_msg: memoryview, tx_service: TransactionService) -> BlockDecompressionResult:
        start_datetime = datetime.utcnow()
        start_time = time.time()

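        # Pass-through decompression: the buffer already holds a complete Ontology
        # block, so it is parsed directly and reported with zero compression savings.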
        block_msg = BlockOntMessage(buf=bytearray(bx_block_msg))

        block_info = BlockInfo(
            block_msg.block_hash(),
            [],
            start_datetime,
            datetime.utcnow(),
            (time.time() - start_time) * 1000,
            block_msg.txn_count(),
            str(block_msg.block_hash()),
            convert.bytes_to_hex(block_msg.prev_block_hash().binary),
            len(block_msg.rawbytes()),
            len(block_msg.rawbytes()),
            0
        )
        return BlockDecompressionResult(block_msg, block_info, [], [])

    def block_to_bx_block(self, block_msg: BlockOntMessage, tx_service: TransactionService) -> \
            Tuple[memoryview, BlockInfo]:
        """
        Compresses an Ontology block's transactions and packs it into a bloXroute block.
        Transactions already known to the transaction service are replaced by their short
        IDs; the resulting buffer layout is sketched after this function.
        """
        compress_start_datetime = datetime.utcnow()
        compress_start_timestamp = time.time()
        size = 0
        buf = deque()
        short_ids = []
        header = block_msg.txn_header()
        size += len(header)
        buf.append(header)

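        # Walk the block's transactions: a transaction whose hash already has a short ID
        # in the tx service is replaced by a 1-byte indicator; unknown ones are kept in full.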
        for tx in block_msg.txns():
            tx_hash, _ = ont_messages_util.get_txid(tx)
            short_id = tx_service.get_short_id(tx_hash)
            if short_id == constants.NULL_TX_SID:
                buf.append(tx)
                size += len(tx)
            else:
                short_ids.append(short_id)
                buf.append(ont_constants.ONT_SHORT_ID_INDICATOR_AS_BYTEARRAY)
                size += 1

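        # Append the collected short IDs as a serialized trailer and account for the
        # 8-byte offset field that will be prepended below.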
        serialized_short_ids = compact_block_short_ids_serializer.serialize_short_ids_into_bytes(
            short_ids)
        buf.append(serialized_short_ids)
        size += constants.UL_ULL_SIZE_IN_BYTES

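        # Prepend the merkle root and a 1-byte flag marking this as a regular block
        # rather than a consensus message.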
        merkle_root = block_msg.merkle_root()
        buf.appendleft(merkle_root)
        size += ont_constants.ONT_HASH_LEN

        is_consensus_msg_buf = struct.pack("?", False)
        buf.appendleft(is_consensus_msg_buf)
        size += 1

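        # Prepend the byte offset at which the serialized short IDs start; at this point
        # size covers everything that precedes them (offset field, flag, merkle root,
        # header and transaction payloads).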
        offset_buf = struct.pack("<Q", size)
        buf.appendleft(offset_buf)
        size += len(serialized_short_ids)

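        # Flatten the deque of fragments into a single contiguous buffer.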
        block = bytearray(size)
        off = 0
        for blob in buf:
            next_off = off + len(blob)
            block[off:next_off] = blob
            off = next_off

        prev_block_hash = convert.bytes_to_hex(
            block_msg.prev_block_hash().binary)
        bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))
        original_size = len(block_msg.rawbytes())

        block_info = BlockInfo(block_msg.block_hash(), short_ids,
                               compress_start_datetime, datetime.utcnow(),
                               (time.time() - compress_start_timestamp) * 1000,
                               block_msg.txn_count(), bx_block_hash,
                               prev_block_hash, original_size, size,
                               100 - float(size) / original_size * 100)
        return memoryview(block), block_info
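
Read back, the buffer built in the last example is laid out as: an 8-byte little-endian
offset, a 1-byte consensus flag, the merkle root (ONT_HASH_LEN bytes), the block header,
the transaction payloads (or 1-byte short-ID indicators), and finally the serialized
short IDs. Below is a minimal sketch of how a reader could use that offset to slice out
the short-ID trailer; the helper name is hypothetical and not part of the converter:

import struct

def locate_short_ids(bx_block: memoryview) -> memoryview:
    # The first 8 bytes hold the offset at which the serialized short IDs begin;
    # everything before that offset is the flag, merkle root, header and
    # transaction payloads.
    short_ids_offset, = struct.unpack_from("<Q", bx_block, 0)
    return bx_block[short_ids_offset:]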