Example #1
    def test_propagate_block_to_network_unencrypted_block(self):
        self.node.opts.encrypt_blocks = False

        block_message = helpers.generate_bytearray(50)
        block_info = BlockInfo(
            Sha256Hash(helpers.generate_bytearray(crypto.SHA256_HASH_LEN)), [],
            datetime.datetime.utcnow(), datetime.datetime.utcnow(), 0, 1,
            helpers.generate_bytearray(crypto.SHA256_HASH_LEN),
            helpers.generate_bytearray(crypto.SHA256_HASH_LEN), 0, 0, 0, [])

        connection = MockConnection(
            MockSocketConnection(1, self.node, ip_address=LOCALHOST,
                                 port=9000), self.node)
        self.neutrality_service.propagate_block_to_network(
            block_message, connection, block_info)

        self.assertEqual(1, len(self.node.broadcast_messages))
        broadcast_message, connection_types = self.node.broadcast_messages[0]
        # self.assertTrue(any(ConnectionType.RELAY_BLOCK & connection_type for connection_type in connection_types))
        self.assertTrue(
            all(ConnectionType.RELAY_BLOCK in connection_type
                for connection_type in connection_types))
        self.assertEqual(block_info.block_hash, broadcast_message.block_hash())

        self.assertNotIn(block_info.block_hash,
                         self.node.in_progress_blocks._cache)
        self.assertNotIn(broadcast_message.block_hash(),
                         self.neutrality_service._receipt_tracker)
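Across these examples `BlockInfo` is always constructed positionally. The sketch below reconstructs the apparent field order from the call sites and attribute accesses on this page (e.g. `block_info.block_hash`, `block_info.short_ids`, `block_info.original_size`); it is an inference, not the project's actual class, and the duration field name is assumed.

    # Hypothetical reconstruction of BlockInfo; field order inferred from usage.
    from dataclasses import dataclass, field
    from datetime import datetime
    from typing import Any, List, Optional

    @dataclass
    class BlockInfoSketch:
        block_hash: Any                          # Sha256Hash in most examples
        short_ids: List[int]
        start_datetime: datetime
        end_datetime: datetime
        duration_ms: float                       # name assumed
        txn_count: Optional[int]
        compressed_block_hash: Optional[str]
        prev_block_hash: Optional[str]
        original_size: Optional[int]
        compressed_size: Optional[int]
        compression_rate: Optional[float]
        ignored_short_ids: List[int] = field(default_factory=list)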
Example #2
 def compact_block_to_bx_block(
     self, compact_block: CompactBlockBtcMessage,
     transaction_service: ExtensionTransactionService
 ) -> CompactBlockCompressionResult:
     compress_start_datetime = datetime.utcnow()
     tsk = self.compact_mapping_tasks.borrow_task()
     tsk.init(tpe.InputBytes(compact_block.buf), transaction_service.proxy,
              compact_block.magic())
     try:
         task_pool_proxy.run_task(tsk)
     except tpe.AggregatedException as e:
         self.compact_mapping_tasks.return_task(tsk)
         raise message_conversion_error.btc_compact_block_compression_error(
             compact_block.block_hash(), e)
     success = tsk.success()
     recovered_item = ExtensionCompactBlockRecoveryData(
         transaction_service, tsk)
     block_info = BlockInfo(compact_block.block_hash(), [],
                            compress_start_datetime,
                            compress_start_datetime, 0, None, None, None,
                            len(compact_block.rawbytes()), None, None, [])
     if success:
         result = CompactBlockCompressionResult(
             False, block_info, None, None, [],
             create_recovered_transactions())
         return self._extension_recovered_compact_block_to_bx_block(
             result, recovered_item)
     else:
         recovery_index = self._last_recovery_idx
         self._extension_recovered_items[recovery_index] = recovered_item
         self._last_recovery_idx += 1
         return CompactBlockCompressionResult(
             False, block_info, None, recovery_index, tsk.missing_indices(),
             create_recovered_transactions())
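A hypothetical caller for this method, based on the `CompactBlockCompressionResult` fields accessed elsewhere on this page (`success`, `bx_block`, `recovery_index`, `missing_indices`); the surrounding names are stand-ins:

    # Sketch only; converter, compact_block_msg and tx_service are assumed names.
    result = converter.compact_block_to_bx_block(compact_block_msg, tx_service)
    if result.success:
        bx_block = result.bx_block
    else:
        # Fetch the transactions at result.missing_indices, fill them into the
        # result, then retry via recovered_compact_block_to_bx_block(result).
        missing = result.missing_indices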
Example #3
    def block_to_bx_block(
        self, block_msg: InternalEthBlockInfo,
        tx_service: ExtensionTransactionService
    ) -> Tuple[memoryview, BlockInfo]:
        compress_start_datetime = datetime.datetime.utcnow()
        compress_start_timestamp = time.time()
        self._default_block_size = max(self._default_block_size,
                                       len(block_msg.rawbytes()))
        tsk = self.compression_tasks.borrow_task()
        tsk.init(tpe.InputBytes(block_msg.rawbytes()), tx_service.proxy)
        try:
            task_pool_proxy.run_task(tsk)
        except tpe.AggregatedException as e:
            self.compression_tasks.return_task(tsk)
            raise message_conversion_error.eth_block_compression_error(
                block_msg.block_hash(), e)
        bx_block = tsk.bx_block()
        starting_offset = tsk.starting_offset()
        block = memoryview(bx_block)[starting_offset:]
        compressed_size = len(block)
        original_size = len(block_msg.rawbytes()) - starting_offset
        block_hash = block_msg.block_hash()

        block_info = BlockInfo(
            block_hash, tsk.short_ids(), compress_start_datetime,
            datetime.datetime.utcnow(),
            (time.time() - compress_start_timestamp) * 1000, tsk.txn_count(),
            tsk.compressed_block_hash().hex_string(),
            tsk.prev_block_hash().hex_string(), original_size, compressed_size,
            100 - float(compressed_size) / original_size * 100)
        self.compression_tasks.return_task(tsk)
        return block, block_info
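The compression rate recorded in `BlockInfo` is simply the percentage of bytes saved, computed the same way in every example here:

    # Worked example of the rate formula used above.
    original_size = 1_000_000
    compressed_size = 50_000
    rate = 100 - float(compressed_size) / original_size * 100  # 95.0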
Example #4
    def block_to_bx_block(
            self, block_msg, tx_service, enable_block_compression: bool,
            min_tx_age_seconds: float) -> Tuple[memoryview, BlockInfo]:
        compress_start_datetime = datetime.utcnow()
        compress_start_timestamp = time.time()
        self._default_block_size = max(self._default_block_size,
                                       len(block_msg.buf))
        tsk = self.compression_tasks.borrow_task()
        tsk.init(tpe.InputBytes(block_msg.buf), tx_service.proxy,
                 enable_block_compression, min_tx_age_seconds)
        try:
            task_pool_proxy.run_task(tsk)
        except tpe.AggregatedException as e:
            self.compression_tasks.return_task(tsk)
            raise message_conversion_error.btc_block_compression_error(
                block_msg.block_hash(), e)
        bx_block = tsk.bx_block()
        block = memoryview(bx_block)
        compressed_size = len(block)
        original_size = len(block_msg.rawbytes())
        block_hash = OntObjectHash(
            binary=convert.hex_to_bytes(tsk.block_hash().hex_string()))

        block_info = BlockInfo(
            block_hash, tsk.short_ids(), compress_start_datetime,
            datetime.utcnow(), (time.time() - compress_start_timestamp) * 1000,
            tsk.txn_count(),
            tsk.compressed_block_hash().hex_string(),
            tsk.prev_block_hash().hex_string(), original_size, compressed_size,
            100 - float(compressed_size) / original_size * 100,
            tsk.ignored_short_ids())
        self.compression_tasks.return_task(tsk)
        return block, block_info
Example #5
 def recovered_compact_block_to_bx_block(
     self,
     failed_compression_result: CompactBlockCompressionResult,
 ) -> CompactBlockCompressionResult:
     failed_block_info = failed_compression_result.block_info
     start_datetime = datetime.utcnow()
     block_info = BlockInfo(
         failed_block_info.block_hash,  # pyre-ignore
         failed_block_info.short_ids,  # pyre-ignore
         start_datetime,
         start_datetime,
         0,
         None,
         None,
         None,
         failed_block_info.original_size,  # pyre-ignore
         None,
         None,
         [])
     failed_compression_result.block_info = block_info
     return self._recovered_compact_block_to_bx_block(
         failed_compression_result,
         self._recovery_items.pop(
             failed_compression_result.recovery_index)  # pyre-ignore
     )
Example #6
def get_block_info(
        bx_block: memoryview,
        block_hash: Sha256Hash,
        short_ids: List[int],
        decompress_start_datetime: datetime,
        decompress_start_timestamp: float,
        total_tx_count: Optional[int] = None,
        btc_block_msg: Optional[BlockBtcMessage] = None
) -> BlockInfo:
    if btc_block_msg is not None:
        bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(bx_block))
        compressed_size = len(bx_block)
        prev_block_hash = convert.bytes_to_hex(btc_block_msg.prev_block_hash().binary)
        btc_block_len = len(btc_block_msg.rawbytes())
        compression_rate = 100 - float(compressed_size) / btc_block_len * 100
    else:
        bx_block_hash = None
        compressed_size = None
        prev_block_hash = None
        btc_block_len = None
        compression_rate = None
    return BlockInfo(
        block_hash,
        short_ids,
        decompress_start_datetime,
        datetime.utcnow(),
        (time.time() - decompress_start_timestamp) * 1000,
        total_tx_count,
        bx_block_hash,
        prev_block_hash,
        btc_block_len,
        compressed_size,
        compression_rate
    )
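A hypothetical call for the successful-decompression branch, matching the signature above (all names besides `get_block_info` are stand-ins):

    # Sketch only; bx_block, block_msg, short_ids and the start markers are assumed.
    info = get_block_info(
        bx_block, block_msg.block_hash(), short_ids,
        decompress_start_datetime, decompress_start_timestamp,
        total_tx_count=block_msg.txn_count(), btc_block_msg=block_msg)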
Example #7
    def block_to_bx_block(
            self, block_msg, tx_service
    ) -> Tuple[memoryview, BlockInfo]:
        """
        Compresses a Bitcoin block's transactions and packs it into a bloXroute block.
        """
        compress_start_datetime = datetime.utcnow()
        compress_start_timestamp = time.time()
        size = 0
        buf = deque()
        short_ids = []
        header = block_msg.header()
        size += len(header)
        buf.append(header)

        for tx in block_msg.txns():

            tx_hash = btc_common_utils.get_txid(tx)
            short_id = tx_service.get_short_id(tx_hash)
            if short_id == constants.NULL_TX_SID:
                buf.append(tx)
                size += len(tx)
            else:
                short_ids.append(short_id)
                buf.append(btc_constants.BTC_SHORT_ID_INDICATOR_AS_BYTEARRAY)
                size += 1

        serialized_short_ids = compact_block_short_ids_serializer.serialize_short_ids_into_bytes(short_ids)
        buf.append(serialized_short_ids)
        size += constants.UL_ULL_SIZE_IN_BYTES
        offset_buf = struct.pack("<Q", size)
        buf.appendleft(offset_buf)
        size += len(serialized_short_ids)

        block = bytearray(size)
        off = 0
        for blob in buf:
            next_off = off + len(blob)
            block[off:next_off] = blob
            off = next_off

        prev_block_hash = convert.bytes_to_hex(block_msg.prev_block_hash().binary)
        bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))
        original_size = len(block_msg.rawbytes())

        block_info = BlockInfo(
            block_msg.block_hash(),
            short_ids,
            compress_start_datetime,
            datetime.utcnow(),
            (time.time() - compress_start_timestamp) * 1000,
            block_msg.txn_count(),
            bx_block_hash,
            prev_block_hash,
            original_size,
            size,
            100 - float(size) / original_size * 100
        )
        return memoryview(block), block_info
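Later examples on this page replace the manual assembly above with a `finalize_block_bytes` helper. Its behavior can be inferred from the inline version: serialize the short ids, prepend an 8-byte offset word pointing at where they start, and flatten the deque into one bytearray. A sketch under those assumptions, reusing this page's `struct`, `constants` and `compact_block_short_ids_serializer` names:

    # Inferred sketch; not the library's actual implementation.
    def finalize_block_bytes_sketch(buf, size, short_ids) -> bytearray:
        serialized = compact_block_short_ids_serializer.serialize_short_ids_into_bytes(short_ids)
        buf.append(serialized)
        size += constants.UL_ULL_SIZE_IN_BYTES      # room for the offset word
        buf.appendleft(struct.pack("<Q", size))     # offset to the short ids
        size += len(serialized)
        block = bytearray(size)
        off = 0
        for blob in buf:
            block[off:off + len(blob)] = blob
            off += len(blob)
        return block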
Example #8
    def block_to_bx_block(
            self, block_msg, tx_service, enable_block_compression: bool,
            min_tx_age_seconds: float) -> Tuple[memoryview, BlockInfo]:
        """
        Compresses a Bitcoin block's transactions and packs it into a bloXroute block.
        """
        compress_start_datetime = datetime.utcnow()
        compress_start_timestamp = time.time()
        size = 0
        buf = deque()
        short_ids = []
        original_size = len(block_msg.rawbytes())
        ignored_sids = []

        header = block_msg.header()
        size += len(header)
        buf.append(header)

        max_timestamp_for_compression = time.time() - min_tx_age_seconds

        for tx in block_msg.txns():
            tx_hash = btc_common_utils.get_txid(tx)
            transaction_key = tx_service.get_transaction_key(tx_hash)
            short_id = tx_service.get_short_id_by_key(transaction_key)

            short_id_assign_time = 0
            if short_id != constants.NULL_TX_SID:
                short_id_assign_time = tx_service.get_short_id_assign_time(
                    short_id)

            if short_id == constants.NULL_TX_SID or \
                    not enable_block_compression or \
                    short_id_assign_time > max_timestamp_for_compression:
                if short_id != constants.NULL_TX_SID:
                    ignored_sids.append(short_id)
                buf.append(tx)
                size += len(tx)
            else:
                short_ids.append(short_id)
                buf.append(btc_constants.BTC_SHORT_ID_INDICATOR_AS_BYTEARRAY)
                size += 1

        block = finalize_block_bytes(buf, size, short_ids)

        prev_block_hash = convert.bytes_to_hex(
            block_msg.prev_block_hash().binary)
        bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))

        block_info = BlockInfo(block_msg.block_hash(), short_ids,
                               compress_start_datetime, datetime.utcnow(),
                               (time.time() - compress_start_timestamp) * 1000,
                               block_msg.txn_count(), bx_block_hash,
                               prev_block_hash, original_size, size,
                               100 - float(size) / original_size * 100,
                               ignored_sids)

        return memoryview(block), block_info
Example #9
    def block_to_bx_block(self, block_msg: BlockOntMessage, tx_service: TransactionService) \
            -> Tuple[memoryview, BlockInfo]:
        start_datetime = datetime.utcnow()
        start_time = time.time()

        block_info = BlockInfo(
            block_msg.block_hash(),
            [],
            start_datetime,
            datetime.utcnow(),
            (time.time() - start_time) * 1000,
            block_msg.txn_count(),
            str(block_msg.block_hash()),
            convert.bytes_to_hex(block_msg.prev_block_hash().binary),
            len(block_msg.rawbytes()),
            len(block_msg.rawbytes()),
            0
        )
        return block_msg.rawbytes(), block_info
Example #10
 def _extension_recovered_compact_block_to_bx_block(
     self,
     mapping_result: CompactBlockCompressionResult,
     recovery_item: ExtensionCompactBlockRecoveryData
 ) -> CompactBlockCompressionResult:
     mapping_task = recovery_item.mapping_task
     compression_task: tpe.BtcCompactBlockCompressionTask = mapping_task.compression_task()
     # pyre-fixme[16]: `List` has no attribute `vector`.
     compression_task.add_recovered_transactions(mapping_result.recovered_transactions.vector)
     mapping_block_info = mapping_result.block_info
     try:
         task_pool_proxy.run_task(compression_task)
     except tpe.AggregatedException as e:
         self.compact_mapping_tasks.return_task(mapping_task)
         # pyre-fixme[16]: `Optional` has no attribute `block_hash`.
         raise message_conversion_error.btc_compact_block_compression_error(mapping_block_info.block_hash, e)
     bx_block = memoryview(compression_task.bx_block())
     block_hash = mapping_block_info.block_hash
     txn_count = compression_task.txn_count()
     compressed_block_hash = compression_task.compressed_block_hash().hex_string()
     prev_block_hash = compression_task.prev_block_hash().hex_string()
     short_ids = compression_task.short_ids()
     compress_end_datetime = datetime.utcnow()
     # pyre-fixme[16]: `Optional` has no attribute `start_datetime`.
     compress_start_datetime = mapping_block_info.start_datetime
     # pyre-fixme[16]: `Optional` has no attribute `original_size`.
     original_size = mapping_block_info.original_size
     compressed_size = len(bx_block)
     block_info = BlockInfo(
         block_hash,
         short_ids,
         compress_start_datetime,
         compress_end_datetime,
         (compress_end_datetime - compress_start_datetime).total_seconds() * 1000,
         txn_count,
         compressed_block_hash,
         prev_block_hash,
         original_size,
         compressed_size,
         100 - float(compressed_size) / original_size * 100
     )
     self.compact_mapping_tasks.return_task(mapping_task)
     return CompactBlockCompressionResult(True, block_info, bx_block, None, [], [])
Example #11
    def bx_block_to_block(self, bx_block_msg: memoryview, tx_service: TransactionService) -> BlockDecompressionResult:
        start_datetime = datetime.utcnow()
        start_time = time.time()

        block_msg = BlockOntMessage(buf=bytearray(bx_block_msg))

        block_info = BlockInfo(
            block_msg.block_hash(),
            [],
            start_datetime,
            datetime.utcnow(),
            (time.time() - start_time) * 1000,
            block_msg.txn_count(),
            str(block_msg.block_hash()),
            convert.bytes_to_hex(block_msg.prev_block_hash().binary),
            len(block_msg.rawbytes()),
            len(block_msg.rawbytes()),
            0
        )
        return BlockDecompressionResult(block_msg, block_info, [], [])
Example #12
    def bx_block_to_block(self, bx_block_msg, tx_service) -> BlockDecompressionResult:
        start_datetime = datetime.datetime.utcnow()
        start_time = time.time()
        block_msg = BlockBtcMessage(buf=bx_block_msg)

        block_info = BlockInfo(
            block_msg.block_hash(),
            [],
            start_datetime,
            datetime.datetime.utcnow(),
            (time.time() - start_time) * 1000,
            block_msg.txn_count(),
            # pyre-fixme[6]: Expected `Optional[str]` for 7th param but got
            #  `BtcObjectHash`.
            block_msg.block_hash(),
            convert.bytes_to_hex(block_msg.prev_block_hash().binary),
            len(block_msg.rawbytes()),
            len(block_msg.rawbytes()),
            0
        )
        return BlockDecompressionResult(block_msg, block_info, [], [])
Example #13
    def block_to_bx_block(self, block_msg, tx_service) -> Tuple[memoryview, BlockInfo]:
        start_datetime = datetime.datetime.utcnow()
        start_time = time.time()

        block_msg = cast(BlockBtcMessage, block_msg)

        block_info = BlockInfo(
            block_msg.block_hash(),
            [],
            start_datetime,
            datetime.datetime.utcnow(),
            (time.time() - start_time) * 1000,
            block_msg.txn_count(),
            # pyre-fixme[6]: Expected `Optional[str]` for 7th param but got
            #  `BtcObjectHash`.
            block_msg.block_hash(),
            convert.bytes_to_hex(block_msg.prev_block_hash().binary),
            len(block_msg.rawbytes()),
            len(block_msg.rawbytes()),
            0
        )
        return block_msg.rawbytes(), block_info
Example #14
 def recovered_compact_block_to_bx_block(  # pyre-ignore
     self,
     failed_mapping_result: CompactBlockCompressionResult,
 ) -> CompactBlockCompressionResult:
     failed_block_info = failed_mapping_result.block_info
     block_info = BlockInfo(
         failed_block_info.block_hash,  # pyre-ignore
         failed_block_info.short_ids,  # pyre-ignore
         datetime.utcnow(),
         datetime.utcnow(),
         0,
         None,
         None,
         None,
         failed_block_info.original_size,  # pyre-ignore
         None,
         None,
         [])
     failed_mapping_result.block_info = block_info
     recovered_item = self._extension_recovered_items.pop(
         failed_mapping_result.recovery_index)  # pyre-ignore
     return self._extension_recovered_compact_block_to_bx_block(
         failed_mapping_result, recovered_item)
Example #15
    def block_to_bx_block(
            self, block_msg: InternalEthBlockInfo,
            tx_service: TransactionService, enable_block_compression: bool,
            min_tx_age_seconds: float) -> Tuple[memoryview, BlockInfo]:
        """
        Convert Ethereum new block message to internal broadcast message with transactions replaced with short ids

        The code is optimized and does not make copies of bytes

        :param block_msg: Ethereum new block message
        :param tx_service: Transactions service
        :param enable_block_compression
        :param min_tx_age_seconds
        :return: Internal broadcast message bytes (bytearray), tuple (txs count, previous block hash)
        """

        compress_start_datetime = datetime.datetime.utcnow()
        compress_start_timestamp = time.time()

        txs_bytes, block_hdr_full_bytes, remaining_bytes, prev_block_bytes = parse_block_message(
            block_msg)

        used_short_ids = []

        # creating transactions content
        content_size = 0
        buf = deque()
        ignored_sids = []

        tx_start_index = 0
        tx_count = 0
        original_size = len(block_msg.rawbytes())
        max_timestamp_for_compression = time.time() - min_tx_age_seconds

        while True:
            if tx_start_index >= len(txs_bytes):
                break

            _, tx_item_length, tx_item_start = rlp_utils.consume_length_prefix(
                txs_bytes, tx_start_index)
            tx_bytes = txs_bytes[tx_start_index:tx_item_start + tx_item_length]
            tx_hash_bytes = eth_common_utils.keccak_hash(tx_bytes)
            tx_hash = Sha256Hash(tx_hash_bytes)
            tx_key = tx_service.get_transaction_key(tx_hash)
            short_id = tx_service.get_short_id_by_key(tx_key)
            short_id_assign_time = 0

            if short_id != constants.NULL_TX_SID:
                short_id_assign_time = tx_service.get_short_id_assign_time(
                    short_id)

            if short_id <= constants.NULL_TX_SID or \
                    not enable_block_compression or short_id_assign_time > max_timestamp_for_compression:
                if short_id > constants.NULL_TX_SID:
                    ignored_sids.append(short_id)
                is_full_tx_bytes = rlp_utils.encode_int(1)
                tx_content_bytes = tx_bytes
            else:
                is_full_tx_bytes = rlp_utils.encode_int(0)
                used_short_ids.append(short_id)
                tx_content_bytes = bytes()

            tx_content_prefix = rlp_utils.get_length_prefix_str(
                len(tx_content_bytes))

            short_tx_content_size = len(is_full_tx_bytes) + len(
                tx_content_prefix) + len(tx_content_bytes)

            short_tx_content_prefix_bytes = rlp_utils.get_length_prefix_list(
                short_tx_content_size)

            buf.append(short_tx_content_prefix_bytes)
            buf.append(is_full_tx_bytes)
            buf.append(tx_content_prefix)
            buf.append(tx_content_bytes)

            content_size += len(
                short_tx_content_prefix_bytes) + short_tx_content_size

            tx_start_index = tx_item_start + tx_item_length

            tx_count += 1

        list_of_txs_prefix_bytes = rlp_utils.get_length_prefix_list(
            content_size)
        buf.appendleft(list_of_txs_prefix_bytes)
        content_size += len(list_of_txs_prefix_bytes)

        buf.appendleft(block_hdr_full_bytes)
        content_size += len(block_hdr_full_bytes)

        buf.append(remaining_bytes)
        content_size += len(remaining_bytes)

        compact_block_msg_prefix = rlp_utils.get_length_prefix_list(
            content_size)
        buf.appendleft(compact_block_msg_prefix)
        content_size += len(compact_block_msg_prefix)

        block = finalize_block_bytes(buf, content_size, used_short_ids)
        bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))

        block_info = BlockInfo(block_msg.block_hash(), used_short_ids,
                               compress_start_datetime,
                               datetime.datetime.utcnow(),
                               (time.time() - compress_start_timestamp) * 1000,
                               tx_count, bx_block_hash,
                               convert.bytes_to_hex(prev_block_bytes),
                               original_size, content_size,
                               100 - float(content_size) / original_size * 100,
                               ignored_sids)

        return memoryview(block), block_info
Example #16
    def bx_block_to_block(self, bx_block_msg,
                          tx_service) -> BlockDecompressionResult:
        """
        Converts internal broadcast message to Ethereum new block message

        The code is optimized and does not make copies of bytes

        :param bx_block_msg: internal broadcast message bytes
        :param tx_service: Transactions service
        :return: tuple (new block message, block hash, unknown transaction short id, unknown transaction hashes)
        """

        if not isinstance(bx_block_msg, (bytearray, memoryview)):
            raise TypeError(
                "Type bytearray is expected for arg block_bytes but was {0}".
                format(type(bx_block_msg)))

        decompress_start_datetime = datetime.datetime.utcnow()
        decompress_start_timestamp = time.time()

        block_msg_bytes = bx_block_msg if isinstance(
            bx_block_msg, memoryview) else memoryview(bx_block_msg)

        block_offsets = compact_block_short_ids_serializer.get_bx_block_offsets(
            bx_block_msg)
        short_ids, short_ids_bytes_len = compact_block_short_ids_serializer.deserialize_short_ids_from_buffer(
            bx_block_msg, block_offsets.short_id_offset)

        block_bytes = block_msg_bytes[
            block_offsets.block_begin_offset:block_offsets.short_id_offset]

        _, block_itm_len, block_itm_start = rlp_utils.consume_length_prefix(
            block_bytes, 0)
        block_itm_bytes = block_bytes[block_itm_start:]

        _, block_hdr_len, block_hdr_start = rlp_utils.consume_length_prefix(
            block_itm_bytes, 0)
        full_hdr_bytes = block_itm_bytes[0:block_hdr_start + block_hdr_len]

        block_hash_bytes = eth_common_utils.keccak_hash(full_hdr_bytes)
        block_hash = Sha256Hash(block_hash_bytes)

        _, block_txs_len, block_txs_start = rlp_utils.consume_length_prefix(
            block_itm_bytes, block_hdr_start + block_hdr_len)
        txs_bytes = block_itm_bytes[block_txs_start:block_txs_start +
                                    block_txs_len]

        remaining_bytes = block_itm_bytes[block_txs_start + block_txs_len:]

        # parse statistics variables
        short_tx_index = 0
        unknown_tx_sids = []
        unknown_tx_hashes = []

        # creating transactions content
        content_size = 0
        buf = deque()
        tx_count = 0

        tx_start_index = 0

        while True:
            if tx_start_index >= len(txs_bytes):
                break

            _, tx_itm_len, tx_itm_start = rlp_utils.consume_length_prefix(
                txs_bytes, tx_start_index)
            tx_bytes = txs_bytes[tx_itm_start:tx_itm_start + tx_itm_len]

            is_full_tx_start = 0
            is_full_tx, is_full_tx_len = rlp_utils.decode_int(
                tx_bytes, is_full_tx_start)

            _, tx_content_len, tx_content_start = rlp_utils.consume_length_prefix(
                tx_bytes, is_full_tx_start + is_full_tx_len)
            tx_content_bytes = tx_bytes[tx_content_start:tx_content_start +
                                        tx_content_len]
            if is_full_tx:
                tx_bytes = tx_content_bytes
            else:
                short_id = short_ids[short_tx_index]
                tx_hash, tx_bytes, _ = tx_service.get_transaction(short_id)

                if tx_hash is None:
                    unknown_tx_sids.append(short_id)
                elif tx_bytes is None:
                    unknown_tx_hashes.append(tx_hash)

                short_tx_index += 1

            if tx_bytes is not None and not unknown_tx_sids and not unknown_tx_hashes:
                buf.append(tx_bytes)
                content_size += len(tx_bytes)

            tx_count += 1

            tx_start_index = tx_itm_start + tx_itm_len

        if not unknown_tx_sids and not unknown_tx_hashes:

            txs_prefix = rlp_utils.get_length_prefix_list(content_size)
            buf.appendleft(txs_prefix)
            content_size += len(txs_prefix)

            buf.appendleft(full_hdr_bytes)
            content_size += len(full_hdr_bytes)

            buf.append(remaining_bytes)
            content_size += len(remaining_bytes)

            msg_len_prefix = rlp_utils.get_length_prefix_list(content_size)
            buf.appendleft(msg_len_prefix)

            block_msg_bytes = bytearray(content_size)
            off = 0
            for blob in buf:
                next_off = off + len(blob)
                block_msg_bytes[off:next_off] = blob
                off = next_off

            block_msg = InternalEthBlockInfo(block_msg_bytes)
            logger.debug(
                "Successfully parsed block broadcast message. {} "
                "transactions in block {}", tx_count, block_hash)

            bx_block_hash = convert.bytes_to_hex(
                crypto.double_sha256(bx_block_msg))
            compressed_size = len(bx_block_msg)

            block_info = BlockInfo(
                block_hash, short_ids, decompress_start_datetime,
                datetime.datetime.utcnow(),
                (time.time() - decompress_start_timestamp) * 1000, tx_count,
                bx_block_hash,
                convert.bytes_to_hex(block_msg.prev_block_hash().binary),
                len(block_msg.rawbytes()), compressed_size,
                100 - float(compressed_size) / content_size * 100, [])

            return BlockDecompressionResult(block_msg, block_info,
                                            unknown_tx_sids, unknown_tx_hashes)
        else:
            logger.debug(
                "Block recovery needed for {}. Missing {} sids, {} tx hashes. "
                "Total txs in block: {}", block_hash, len(unknown_tx_sids),
                len(unknown_tx_hashes), tx_count)

            return BlockDecompressionResult(
                None,
                BlockInfo(block_hash, short_ids, decompress_start_datetime,
                          datetime.datetime.utcnow(),
                          (time.time() - decompress_start_timestamp) * 1000,
                          None, None, None, None, None, None, []),
                unknown_tx_sids, unknown_tx_hashes)
Example #17
    def block_to_bx_block(
            self, block_msg: InternalEthBlockInfo,
            tx_service: TransactionService) -> Tuple[memoryview, BlockInfo]:
        """
        Convert Ethereum new block message to internal broadcast message with transactions replaced with short ids

        The code is optimized and does not make copies of bytes

        :param block_msg: Ethereum new block message
        :param tx_service: Transactions service
        :return: Internal broadcast message bytes (bytearray), tuple (txs count, previous block hash)
        """

        compress_start_datetime = datetime.datetime.utcnow()
        compress_start_timestamp = time.time()
        msg_bytes = memoryview(block_msg.rawbytes())

        _, block_msg_itm_len, block_msg_itm_start = rlp_utils.consume_length_prefix(
            msg_bytes, 0)

        block_msg_bytes = msg_bytes[block_msg_itm_start:block_msg_itm_start +
                                    block_msg_itm_len]

        _, block_hdr_itm_len, block_hdr_itm_start = rlp_utils.consume_length_prefix(
            block_msg_bytes, 0)
        block_hdr_full_bytes = block_msg_bytes[0:block_hdr_itm_start +
                                               block_hdr_itm_len]
        block_hdr_bytes = block_msg_bytes[
            block_hdr_itm_start:block_hdr_itm_start + block_hdr_itm_len]

        _, prev_block_itm_len, prev_block_itm_start = rlp_utils.consume_length_prefix(
            block_hdr_bytes, 0)
        prev_block_bytes = block_hdr_bytes[
            prev_block_itm_start:prev_block_itm_start + prev_block_itm_len]

        _, txs_itm_len, txs_itm_start = rlp_utils.consume_length_prefix(
            block_msg_bytes, block_hdr_itm_start + block_hdr_itm_len)
        txs_bytes = block_msg_bytes[txs_itm_start:txs_itm_start + txs_itm_len]

        remaining_bytes = block_msg_bytes[txs_itm_start + txs_itm_len:]

        used_short_ids = []

        # creating transactions content
        content_size = 0
        buf = deque()

        tx_start_index = 0
        tx_count = 0

        while True:
            if tx_start_index >= len(txs_bytes):
                break

            _, tx_item_length, tx_item_start = rlp_utils.consume_length_prefix(
                txs_bytes, tx_start_index)
            tx_bytes = txs_bytes[tx_start_index:tx_item_start + tx_item_length]
            tx_hash_bytes = eth_common_utils.keccak_hash(tx_bytes)
            tx_hash = Sha256Hash(tx_hash_bytes)
            short_id = tx_service.get_short_id(tx_hash)

            if short_id <= 0:
                is_full_tx_bytes = rlp_utils.encode_int(1)
                tx_content_bytes = tx_bytes
            else:
                is_full_tx_bytes = rlp_utils.encode_int(0)
                used_short_ids.append(short_id)
                tx_content_bytes = bytes()

            tx_content_prefix = rlp_utils.get_length_prefix_str(
                len(tx_content_bytes))

            short_tx_content_size = len(is_full_tx_bytes) + len(
                tx_content_prefix) + len(tx_content_bytes)

            short_tx_content_prefix_bytes = rlp_utils.get_length_prefix_list(
                short_tx_content_size)

            buf.append(short_tx_content_prefix_bytes)
            buf.append(is_full_tx_bytes)
            buf.append(tx_content_prefix)
            buf.append(tx_content_bytes)

            content_size += len(
                short_tx_content_prefix_bytes) + short_tx_content_size

            tx_start_index = tx_item_start + tx_item_length

            tx_count += 1

        list_of_txs_prefix_bytes = rlp_utils.get_length_prefix_list(
            content_size)
        buf.appendleft(list_of_txs_prefix_bytes)
        content_size += len(list_of_txs_prefix_bytes)

        buf.appendleft(block_hdr_full_bytes)
        content_size += len(block_hdr_full_bytes)

        buf.append(remaining_bytes)
        content_size += len(remaining_bytes)

        compact_block_msg_prefix = rlp_utils.get_length_prefix_list(
            content_size)
        buf.appendleft(compact_block_msg_prefix)
        content_size += len(compact_block_msg_prefix)

        short_ids_bytes = compact_block_short_ids_serializer.serialize_short_ids_into_bytes(
            used_short_ids)
        buf.append(short_ids_bytes)
        content_size += constants.UL_ULL_SIZE_IN_BYTES
        offset_buf = struct.pack("<Q", content_size)
        buf.appendleft(offset_buf)
        content_size += len(short_ids_bytes)

        # Parse it into the bloXroute message format and send it along
        block = bytearray(content_size)
        off = 0
        for blob in buf:
            next_off = off + len(blob)
            block[off:next_off] = blob
            off = next_off

        bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))
        original_size = len(block_msg.rawbytes())

        block_info = BlockInfo(block_msg.block_hash(), used_short_ids,
                               compress_start_datetime,
                               datetime.datetime.utcnow(),
                               (time.time() - compress_start_timestamp) * 1000,
                               tx_count, bx_block_hash,
                               convert.bytes_to_hex(prev_block_bytes),
                               original_size, content_size,
                               100 - float(content_size) / original_size * 100)
        return memoryview(block), block_info
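The 8-byte `<Q` word packed above is what `get_bx_block_offsets` reads back during decompression (Example #16). A minimal sketch of that read, assuming the layout shown here (offset word, then block payload, then serialized short ids):

    import struct

    def get_bx_block_offsets_sketch(bx_block: memoryview):
        # The first 8 bytes point at the serialized short ids.
        short_id_offset, = struct.unpack_from("<Q", bx_block, 0)
        block_begin_offset = 8  # assumption: payload starts right after the word
        return block_begin_offset, short_id_offset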
Example #18
    def block_to_bx_block(
            self, block_msg, tx_service, enable_block_compression: bool,
            min_tx_age_seconds: float) -> Tuple[memoryview, BlockInfo]:
        """
        Pack an Ontology block's transactions into a bloXroute block.
        """
        compress_start_datetime = datetime.utcnow()
        compress_start_timestamp = time.time()
        size = 0
        buf = deque()
        short_ids = []
        original_size = len(block_msg.rawbytes())

        header = block_msg.txn_header()
        size += len(header)
        buf.append(header)
        max_timestamp_for_compression = time.time() - min_tx_age_seconds
        ignored_sids = []

        for tx in block_msg.txns():
            tx_hash, _ = ont_messages_util.get_txid(tx)
            transaction_key = tx_service.get_transaction_key(tx_hash)
            short_id = tx_service.get_short_id_by_key(transaction_key)
            short_id_assign_time = 0

            if short_id != constants.NULL_TX_SID:
                short_id_assign_time = tx_service.get_short_id_assign_time(
                    short_id)

            if short_id == constants.NULL_TX_SID or \
                    not enable_block_compression or \
                    short_id_assign_time > max_timestamp_for_compression:
                if short_id != constants.NULL_TX_SID:
                    ignored_sids.append(short_id)
                buf.append(tx)
                size += len(tx)
            else:
                short_ids.append(short_id)
                buf.append(ont_constants.ONT_SHORT_ID_INDICATOR_AS_BYTEARRAY)
                size += 1

        serialized_short_ids = compact_block_short_ids_serializer.serialize_short_ids_into_bytes(
            short_ids)
        buf.append(serialized_short_ids)
        size += constants.UL_ULL_SIZE_IN_BYTES

        merkle_root = block_msg.merkle_root()
        buf.appendleft(merkle_root)
        size += ont_constants.ONT_HASH_LEN

        is_consensus_msg_buf = struct.pack("?", False)
        buf.appendleft(is_consensus_msg_buf)
        size += 1

        offset_buf = struct.pack("<Q", size)
        buf.appendleft(offset_buf)
        size += len(serialized_short_ids)

        block = bytearray(size)
        off = 0
        for blob in buf:
            next_off = off + len(blob)
            block[off:next_off] = blob
            off = next_off

        prev_block_hash = convert.bytes_to_hex(
            block_msg.prev_block_hash().binary)
        bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))

        block_info = BlockInfo(block_msg.block_hash(), short_ids,
                               compress_start_datetime, datetime.utcnow(),
                               (time.time() - compress_start_timestamp) * 1000,
                               block_msg.txn_count(), bx_block_hash,
                               prev_block_hash, original_size, size,
                               100 - float(size) / original_size * 100,
                               ignored_sids)

        return memoryview(block), block_info
Example #19
 def block_to_bx_block(
         self, block_msg, tx_service, enable_block_compression: bool,
         min_tx_age_seconds: float) -> Tuple[memoryview, BlockInfo]:
     return block_msg.rawbytes(), \
            BlockInfo(convert.bytes_to_hex(self.PREV_BLOCK.binary), [], datetime.datetime.utcnow(),
                      datetime.datetime.utcnow(), 0, 0, None, None, 0, 0, 0, [])
Example #20
    def _recovered_compact_block_to_bx_block(
        self, compression_result: CompactBlockCompressionResult,
        recovery_item: CompactBlockRecoveryData
    ) -> CompactBlockCompressionResult:
        """
        Handle recovery of Bitcoin compact block message.
        """

        missing_indices = compression_result.missing_indices
        recovered_transactions = compression_result.recovered_transactions
        block_transactions = recovery_item.block_transactions
        if len(missing_indices) != len(recovered_transactions):
            logger.debug(
                "Number of transactions missing in compact block does not match number of recovered transactions."
                "Missing transactions - {}. Recovered transactions - {}",
                len(missing_indices), len(recovered_transactions))
            return CompactBlockCompressionResult(False, None, None, None,
                                                 missing_indices,
                                                 recovered_transactions)

        for i in range(len(missing_indices)):
            missing_index = missing_indices[i]
            block_transactions[missing_index] = recovered_transactions[i]

        size = 0
        total_txs_count = len(block_transactions)
        block_msg_parts = deque()

        block_header = recovery_item.block_header
        block_msg_parts.append(block_header)
        size += len(block_header)

        tx_count_size = btc_messages_util.get_sizeof_btc_varint(
            total_txs_count)
        tx_count_buf = bytearray(tx_count_size)
        btc_messages_util.pack_int_to_btc_varint(total_txs_count, tx_count_buf,
                                                 0)
        block_msg_parts.append(tx_count_buf)
        size += tx_count_size

        for transaction in block_transactions:
            block_msg_parts.append(transaction)
            size += len(transaction)  # pyre-ignore

        msg_header = bytearray(btc_constants.BTC_HDR_COMMON_OFF)
        struct.pack_into("<L12sL", msg_header, 0, recovery_item.magic,
                         BtcMessageType.BLOCK, size)
        block_msg_parts.appendleft(msg_header)
        size += btc_constants.BTC_HDR_COMMON_OFF

        block_msg_bytes = bytearray(size)
        off = 0
        for blob in block_msg_parts:
            next_off = off + len(blob)
            block_msg_bytes[off:next_off] = blob
            off = next_off

        checksum = crypto.bitcoin_hash(
            block_msg_bytes[btc_constants.BTC_HDR_COMMON_OFF:size])
        block_msg_bytes[btc_constants.BTC_HEADER_MINUS_CHECKSUM:btc_constants.
                        BTC_HDR_COMMON_OFF] = checksum[0:4]
        bx_block, compression_block_info = self.block_to_bx_block(
            BlockBtcMessage(buf=block_msg_bytes), recovery_item.tx_service,
            True, 0)  # TODO need to think about a better algorithm
        compress_start_datetime = compression_block_info.start_datetime
        compress_end_datetime = datetime.utcnow()
        block_info = BlockInfo(
            compression_block_info.block_hash,
            compression_block_info.short_ids, compress_start_datetime,
            compress_end_datetime,
            (compress_end_datetime - compress_start_datetime).total_seconds() *
            1000, compression_block_info.txn_count,
            compression_block_info.compressed_block_hash,
            compression_block_info.prev_block_hash,
            compression_block_info.original_size,
            compression_block_info.compressed_size,
            compression_block_info.compression_rate,
            compression_block_info.ignored_short_ids)
        return CompactBlockCompressionResult(True, block_info, bx_block, None,
                                             [], [])
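The `<L12sL` pack plus the trailing 4-byte checksum above matches the standard 24-byte Bitcoin P2P header (`BTC_HDR_COMMON_OFF`): magic, null-padded command, payload length, checksum. A standalone illustration with mainnet magic and an arbitrary payload length:

    import struct

    header = bytearray(24)  # magic (4) + command (12) + length (4) + checksum (4)
    struct.pack_into("<L12sL", header, 0, 0xD9B4BEF9, b"block", 285)
    # Bytes 20:24 then receive the first 4 bytes of double-sha256(payload).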
Example #21
 def block_to_bx_block(self, block_msg,
                       tx_service) -> Tuple[memoryview, BlockInfo]:
     return block_msg.rawbytes(), \
            BlockInfo(convert.bytes_to_hex(self.PREV_BLOCK.binary), [], datetime.datetime.utcnow(),
                      datetime.datetime.utcnow(), 0, 0, None, None, 0, 0, 0)
Example #22
    def compact_block_to_bx_block(
        self, compact_block: CompactBlockBtcMessage,
        transaction_service: TransactionService
    ) -> CompactBlockCompressionResult:
        """
         Handle decompression of Bitcoin compact block.
         Decompression converts compact block message to full block message.
         """
        compress_start_datetime = datetime.utcnow()
        block_header = compact_block.block_header()
        sha256_hash = hashlib.sha256()
        sha256_hash.update(block_header)
        sha256_hash.update(compact_block.short_nonce_buf())
        hex_digest = sha256_hash.digest()
        key = hex_digest[0:16]

        short_ids = compact_block.short_ids()

        short_id_to_tx_contents = {}

        for tx_hash in transaction_service.iter_transaction_hashes():
            tx_hash_binary = tx_hash.binary[::-1]
            tx_short_id = compute_short_id(key, tx_hash_binary)
            if tx_short_id in short_ids:
                tx_content = transaction_service.get_transaction_by_hash(
                    tx_hash)
                if tx_content is None:
                    logger.debug(
                        "Hash {} is known by transactions service but content is missing.",
                        tx_hash)
                else:
                    short_id_to_tx_contents[tx_short_id] = tx_content
            if len(short_id_to_tx_contents) == len(short_ids):
                break

        block_transactions = []
        missing_transactions_indices = []
        pre_filled_transactions = compact_block.pre_filled_transactions()
        total_txs_count = len(pre_filled_transactions) + len(short_ids)

        size = 0
        block_msg_parts = deque()

        block_msg_parts.append(block_header)
        size += len(block_header)

        tx_count_size = btc_messages_util.get_sizeof_btc_varint(
            total_txs_count)
        tx_count_buf = bytearray(tx_count_size)
        btc_messages_util.pack_int_to_btc_varint(total_txs_count, tx_count_buf,
                                                 0)
        block_msg_parts.append(tx_count_buf)
        size += tx_count_size

        short_ids_iter = iter(short_ids.keys())

        for index in range(total_txs_count):
            if index not in pre_filled_transactions:
                short_id = next(short_ids_iter)

                if short_id in short_id_to_tx_contents:
                    short_tx = short_id_to_tx_contents[short_id]
                    block_msg_parts.append(short_tx)
                    block_transactions.append(short_tx)
                    size += len(short_tx)
                else:
                    missing_transactions_indices.append(index)
                    block_transactions.append(None)
            else:
                pre_filled_transaction = pre_filled_transactions[index]
                block_msg_parts.append(pre_filled_transaction)
                block_transactions.append(pre_filled_transaction)
                size += len(pre_filled_transaction)

        recovered_item = CompactBlockRecoveryData(block_transactions,
                                                  block_header,
                                                  compact_block.magic(),
                                                  transaction_service)

        block_info = BlockInfo(compact_block.block_hash(), [],
                               compress_start_datetime,
                               compress_start_datetime, 0, None, None, None,
                               len(compact_block.rawbytes()), None, None, [])

        if len(missing_transactions_indices) > 0:
            recovery_index = self._last_recovery_idx
            self._last_recovery_idx += 1
            self._recovery_items[
                recovery_index] = recovered_item  # pyre-ignore
            return CompactBlockCompressionResult(False, block_info, None,
                                                 recovery_index,
                                                 missing_transactions_indices,
                                                 [])
        result = CompactBlockCompressionResult(False, block_info, None, None,
                                               [], [])
        return self._recovered_compact_block_to_bx_block(
            result, recovered_item)
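The short-id key derived at the top of this example follows the BIP 152 compact-block scheme: the first 16 bytes of sha256(block header || nonce) key the per-transaction short ids. That step in isolation:

    import hashlib

    def derive_short_id_key(block_header: bytes, nonce_buf: bytes) -> bytes:
        # First 16 bytes of sha256(header || nonce), as in the loop above.
        return hashlib.sha256(bytes(block_header) + bytes(nonce_buf)).digest()[:16]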
Example #23
    def block_to_bx_block(
        self,
        block_msg: OntConsensusMessage,
        tx_service: TransactionService,
        enable_block_compression: bool,
        min_tx_age_seconds: float
    ) -> Tuple[memoryview, BlockInfo]:
        """
        Pack an Ontology consensus message's transactions into a bloXroute block.
        """
        consensus_msg = block_msg
        compress_start_datetime = datetime.utcnow()
        compress_start_timestamp = time.time()
        size = 0
        buf = deque()
        short_ids = []
        ignored_sids = []
        original_size = len(consensus_msg.rawbytes())

        consensus_payload_header = consensus_msg.consensus_payload_header()
        consensus_payload_header_len = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", consensus_payload_header_len, 0, len(consensus_payload_header))
        size += ont_constants.ONT_INT_LEN
        buf.append(consensus_payload_header_len)
        size += len(consensus_payload_header)
        buf.append(consensus_payload_header)
        consensus_data_type = bytearray(ont_constants.ONT_CHAR_LEN)
        struct.pack_into("<B", consensus_data_type, 0, consensus_msg.consensus_data_type())
        size += ont_constants.ONT_CHAR_LEN
        buf.append(consensus_data_type)
        consensus_data_len = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", consensus_data_len, 0, consensus_msg.consensus_data_len())
        size += ont_constants.ONT_INT_LEN
        buf.append(consensus_data_len)
        block_start_len = consensus_msg.block_start_len_memoryview()
        txn_header = consensus_msg.txn_header()
        block_start_len_and_txn_header_total_len = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", block_start_len_and_txn_header_total_len, 0, len(block_start_len) + len(txn_header))
        size += ont_constants.ONT_INT_LEN
        buf.append(block_start_len_and_txn_header_total_len)
        size += len(block_start_len)
        buf.append(block_start_len)
        size += len(txn_header)
        buf.append(txn_header)
        max_timestamp_for_compression = time.time() - min_tx_age_seconds

        for tx in consensus_msg.txns():
            tx_hash, _ = ont_messages_util.get_txid(tx)
            short_id = tx_service.get_short_id(tx_hash)
            short_id_assign_time = 0

            if short_id != constants.NULL_TX_SID:
                short_id_assign_time = tx_service.get_short_id_assign_time(short_id)

            if short_id == constants.NULL_TX_SID or \
                    not enable_block_compression or \
                    short_id_assign_time > max_timestamp_for_compression:
                if short_id != constants.NULL_TX_SID:
                    ignored_sids.append(short_id)
                buf.append(tx)
                size += len(tx)
            else:
                short_ids.append(short_id)
                buf.append(ont_constants.ONT_SHORT_ID_INDICATOR_AS_BYTEARRAY)
                size += 1

        # Prepend owner and signature, consensus payload tail, tx count and block hash to bx_block
        owner_and_signature = consensus_msg.owner_and_signature()
        owner_and_signature_len = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", owner_and_signature_len, 0, len(owner_and_signature))
        size += len(owner_and_signature)
        buf.appendleft(owner_and_signature)
        size += ont_constants.ONT_INT_LEN
        buf.appendleft(owner_and_signature_len)
        payload_tail = consensus_msg.payload_tail()
        payload_tail_len = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", payload_tail_len, 0, len(payload_tail))
        size += len(payload_tail)
        buf.appendleft(payload_tail)
        size += ont_constants.ONT_INT_LEN
        buf.appendleft(payload_tail_len)
        txn_count = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", txn_count, 0, consensus_msg.txn_count())
        size += ont_constants.ONT_INT_LEN
        buf.appendleft(txn_count)
        block_hash = consensus_msg.block_hash().binary
        size += ont_constants.ONT_HASH_LEN
        buf.appendleft(block_hash)

        is_consensus_msg_buf = struct.pack("?", True)
        buf.appendleft(is_consensus_msg_buf)
        size += 1

        block = finalize_block_bytes(buf, size, short_ids)

        prev_block_hash = convert.bytes_to_hex(consensus_msg.prev_block_hash().binary)
        bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))

        block_info = BlockInfo(
            consensus_msg.block_hash(),
            short_ids,
            compress_start_datetime,
            datetime.utcnow(),
            (time.time() - compress_start_timestamp) * 1000,
            consensus_msg.txn_count(),
            bx_block_hash,
            prev_block_hash,
            original_size,
            size,
            100 - float(size) / original_size * 100,
            ignored_sids
        )

        return memoryview(block), block_info
Example #24
    def block_to_bx_block(
            self, block_msg: OntConsensusMessage, tx_service: TransactionService
    ) -> Tuple[memoryview, BlockInfo]:
        """
        Compresses an Ontology consensus message's transactions and packs it into a bloXroute block.
        """
        consensus_msg = block_msg
        compress_start_datetime = datetime.utcnow()
        compress_start_timestamp = time.time()
        size = 0
        buf = deque()
        short_ids = []
        consensus_payload_header = consensus_msg.consensus_payload_header()
        consensus_payload_header_len = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", consensus_payload_header_len, 0, len(consensus_payload_header))
        size += ont_constants.ONT_INT_LEN
        buf.append(consensus_payload_header_len)
        size += len(consensus_payload_header)
        buf.append(consensus_payload_header)
        consensus_data_type = bytearray(ont_constants.ONT_CHAR_LEN)
        struct.pack_into("<B", consensus_data_type, 0, consensus_msg.consensus_data_type())
        size += ont_constants.ONT_CHAR_LEN
        buf.append(consensus_data_type)
        consensus_data_len = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", consensus_data_len, 0, consensus_msg.consensus_data_len())
        size += ont_constants.ONT_INT_LEN
        buf.append(consensus_data_len)
        block_start_len = consensus_msg.block_start_len_memoryview()
        txn_header = consensus_msg.txn_header()
        block_start_len_and_txn_header_total_len = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", block_start_len_and_txn_header_total_len, 0, len(block_start_len) + len(txn_header))
        size += ont_constants.ONT_INT_LEN
        buf.append(block_start_len_and_txn_header_total_len)
        size += len(block_start_len)
        buf.append(block_start_len)
        size += len(txn_header)
        buf.append(txn_header)

        for tx in consensus_msg.txns():
            tx_hash, _ = ont_messages_util.get_txid(tx)
            short_id = tx_service.get_short_id(tx_hash)
            if short_id == constants.NULL_TX_SID:
                buf.append(tx)
                size += len(tx)
            else:
                short_ids.append(short_id)
                buf.append(ont_constants.ONT_SHORT_ID_INDICATOR_AS_BYTEARRAY)
                size += 1

        # Prepend owner and signature, consensus payload tail, tx count and block hash to bx_block
        owner_and_signature = consensus_msg.owner_and_signature()
        owner_and_signature_len = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", owner_and_signature_len, 0, len(owner_and_signature))
        size += len(owner_and_signature)
        buf.appendleft(owner_and_signature)
        size += ont_constants.ONT_INT_LEN
        buf.appendleft(owner_and_signature_len)
        payload_tail = consensus_msg.payload_tail()
        payload_tail_len = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", payload_tail_len, 0, len(payload_tail))
        size += len(payload_tail)
        buf.appendleft(payload_tail)
        size += ont_constants.ONT_INT_LEN
        buf.appendleft(payload_tail_len)
        txn_count = bytearray(ont_constants.ONT_INT_LEN)
        struct.pack_into("<L", txn_count, 0, consensus_msg.txn_count())
        size += ont_constants.ONT_INT_LEN
        buf.appendleft(txn_count)
        block_hash = consensus_msg.block_hash().binary
        size += ont_constants.ONT_HASH_LEN
        buf.appendleft(block_hash)

        is_consensus_msg_buf = struct.pack("?", True)
        buf.appendleft(is_consensus_msg_buf)
        size += 1

        serialized_short_ids = compact_block_short_ids_serializer.serialize_short_ids_into_bytes(short_ids)
        buf.append(serialized_short_ids)
        size += constants.UL_ULL_SIZE_IN_BYTES
        offset_buf = struct.pack("<Q", size)
        buf.appendleft(offset_buf)
        size += len(serialized_short_ids)

        block = bytearray(size)
        off = 0
        for blob in buf:
            next_off = off + len(blob)
            block[off:next_off] = blob
            off = next_off

        prev_block_hash = convert.bytes_to_hex(consensus_msg.prev_block_hash().binary)
        bx_block_hash = convert.bytes_to_hex(crypto.double_sha256(block))
        original_size = len(consensus_msg.rawbytes())

        block_info = BlockInfo(
            consensus_msg.block_hash(),
            short_ids,
            compress_start_datetime,
            datetime.utcnow(),
            (time.time() - compress_start_timestamp) * 1000,
            consensus_msg.txn_count(),
            bx_block_hash,
            prev_block_hash,
            original_size,
            size,
            100 - float(size) / original_size * 100
        )
        return memoryview(block), block_info
Example #25
    def bx_block_to_block(self, bx_block_msg,
                          tx_service) -> BlockDecompressionResult:
        decompress_start_datetime = datetime.datetime.utcnow()
        decompress_start_timestamp = time.time()
        tsk = self.decompression_tasks.borrow_task()
        tsk.init(tpe.InputBytes(bx_block_msg), tx_service.proxy)
        try:
            task_pool_proxy.run_task(tsk)
        except tpe.AggregatedException as e:
            block_hash = Sha256Hash(
                convert.hex_to_bytes(tsk.block_hash().hex_string()))
            self.decompression_tasks.return_task(tsk)
            # TODO find a better solution
            raise message_conversion_error.eth_block_decompression_error(
                block_hash, e)

        total_tx_count = tsk.tx_count()
        unknown_tx_hashes = [
            Sha256Hash(bytearray(unknown_tx_hash.binary()))
            for unknown_tx_hash in tsk.unknown_tx_hashes()
        ]
        unknown_tx_sids = tsk.unknown_tx_sids()
        block_hash = Sha256Hash(
            convert.hex_to_bytes(tsk.block_hash().hex_string()))

        if tsk.success():
            starting_offset = tsk.starting_offset()
            block = memoryview(tsk.block_message())[starting_offset:]
            block_msg = InternalEthBlockInfo(block)
            content_size = len(block_msg.rawbytes())
            logger.debug(
                "Successfully parsed block broadcast message. {} "
                "transactions in block {}", total_tx_count, block_hash)
            bx_block_hash = convert.bytes_to_hex(
                crypto.double_sha256(bx_block_msg))
            compressed_size = len(bx_block_msg)

            block_info = BlockInfo(
                block_hash, tsk.short_ids(), decompress_start_datetime,
                datetime.datetime.utcnow(),
                (time.time() - decompress_start_timestamp) * 1000,
                total_tx_count, bx_block_hash,
                convert.bytes_to_hex(block_msg.prev_block_hash().binary),
                len(block_msg.rawbytes()), compressed_size,
                100 - float(compressed_size) / content_size * 100)
        else:
            block_msg = None

            logger.debug(
                "Block recovery needed for {}. Missing {} sids, {} tx hashes. "
                "Total txs in block: {}", block_hash, len(unknown_tx_sids),
                len(unknown_tx_hashes), total_tx_count)
            block_info = BlockInfo(
                block_hash, tsk.short_ids(), decompress_start_datetime,
                datetime.datetime.utcnow(),
                (time.time() - decompress_start_timestamp) * 1000, None, None,
                None, None, None, None)

        self.decompression_tasks.return_task(tsk)
        return BlockDecompressionResult(block_msg, block_info, unknown_tx_sids,
                                        unknown_tx_hashes)