Example #1
class EthPendingTransactionFeed(Feed[EthTransactionFeedEntry, EthRawTransaction]):
    NAME = rpc_constants.ETH_PENDING_TRANSACTION_FEED_NAME
    FIELDS = ["tx_hash", "tx_contents"]

    published_transactions: ExpiringSet[Sha256Hash]

    def __init__(self, alarm_queue: AlarmQueue) -> None:
        super().__init__(self.NAME)

        # enforce uniqueness, since multiple sources can publish to
        # pending transactions (eth ws + remote)
        self.published_transactions = ExpiringSet(
            alarm_queue, EXPIRATION_TIME_S, "pendingTxs"
        )

    def subscribe(
        self, options: Dict[str, Any]
    ) -> Subscriber[EthTransactionFeedEntry]:
        duplicates = options.get("duplicates", None)
        if duplicates is not None:
            if not isinstance(duplicates, bool):
                raise RpcInvalidParams(
                    "\"duplicates\" must be a boolean"
                )

        return super().subscribe(options)

    def publish(self, raw_message: EthRawTransaction) -> None:
        if (
            raw_message.tx_hash in self.published_transactions
            and not self.any_subscribers_want_duplicates()
        ):
            return

        super().publish(raw_message)

        self.published_transactions.add(raw_message.tx_hash)

    def serialize(self, raw_message: EthRawTransaction) -> EthTransactionFeedEntry:
        return EthTransactionFeedEntry(raw_message.tx_hash, raw_message.tx_contents)

    def any_subscribers_want_duplicates(self) -> bool:
        for subscriber in self.subscribers.values():
            if subscriber.options.get("duplicates", False):
                return True
        return False

    def should_publish_message_to_subscriber(
        self,
        subscriber: Subscriber[EthTransactionFeedEntry],
        raw_message: EthRawTransaction,
        serialized_message: EthTransactionFeedEntry
    ) -> bool:
        if (
            raw_message.tx_hash in self.published_transactions
            and not subscriber.options.get("duplicates", False)
        ):
            return False
        else:
            return True
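
The feed above deduplicates with an ExpiringSet keyed by transaction hash. Below is a minimal, self-contained sketch of that pattern that assumes nothing from bloXroute: SimpleExpiringSet is an illustrative stand-in for ExpiringSet (no AlarmQueue; expiry is checked lazily on lookup).

import time
from typing import Any, Dict


class SimpleExpiringSet:
    """Illustrative stand-in for ExpiringSet: entries expire after ttl seconds."""

    def __init__(self, ttl: float) -> None:
        self._ttl = ttl
        self._expirations: Dict[Any, float] = {}

    def add(self, item: Any) -> None:
        self._expirations[item] = time.time() + self._ttl

    def __contains__(self, item: Any) -> bool:
        deadline = self._expirations.get(item)
        if deadline is None:
            return False
        if time.time() > deadline:
            # expired: drop lazily instead of via an alarm queue
            del self._expirations[item]
            return False
        return True


published = SimpleExpiringSet(ttl=5.0)


def publish(tx_hash: str) -> None:
    # mirrors EthPendingTransactionFeed.publish: skip already-seen hashes
    if tx_hash in published:
        return
    print("publishing", tx_hash)
    published.add(tx_hash)


publish("0xabc")  # publishes
publish("0xabc")  # suppressed as a duplicate within the TTL window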
Example #2
    def test_get_object_size(self):
        mock_node = MockNode(get_common_opts(1234))
        object_size = memory_utils.get_object_size(mock_node)

        self.assertIsInstance(object_size, ObjectSize)
        self.assertTrue(object_size.size > 0)
        self.assertTrue(object_size.flat_size > 0)
        self.assertTrue(object_size.is_actual_size)
        self.assertEqual(0, len(object_size.references))

        ex_set = ExpiringSet(AlarmQueue(), 10, "testset")
        s = set()
        h1 = Sha256Hash(b"1" * 32)
        h2 = Sha256Hash(b"0" * 32)
        print(memory_utils.get_object_size(ex_set).size)
        print(memory_utils.get_object_size(s).size)
        print(memory_utils.get_object_size(h1).size)
        print(memory_utils.get_object_size(h2).size)
        print(memory_utils.get_special_size(ex_set).size)
        print(memory_utils.get_special_size(s).size)
        print(memory_utils.get_special_size(h1).size)
        print(memory_utils.get_special_size(h2).size)
        ex_set.add(h1)
        ex_set.add(h2)
        s.add(h1)
        s.add(h2)
        print(memory_utils.get_object_size(ex_set).size)
        print(memory_utils.get_special_size(ex_set).size)
        print(memory_utils.get_object_size(s).size)
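
The test above compares deep object sizes reported by memory_utils for an ExpiringSet against a plain set. As a rough standalone analogue, the standard library's sys.getsizeof reports only the flat size, so a deep measurement has to walk references by hand (a sketch, not the memory_utils implementation):

import sys

s = set()
print(sys.getsizeof(s))  # flat size of the empty set object
s.add(b"1" * 32)
s.add(b"0" * 32)
print(sys.getsizeof(s))  # grows as the hash table is resized
# sys.getsizeof does not include the 32-byte members themselves;
# a deep measurement also sums sys.getsizeof over each element.
print(sys.getsizeof(s) + sum(sys.getsizeof(m) for m in s))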
Example #3
class ExpiringSetTest(AbstractTestCase):
    ALARM_QUEUE = AlarmQueue()
    EXPIRATION_TIME_S = 1

    def setUp(self):
        self.sut = ExpiringSet(self.ALARM_QUEUE, self.EXPIRATION_TIME_S)

    def test_cleanup(self):
        test_item = "dummy_text"
        self.sut.add(test_item)
        self.assertTrue(test_item in self.sut.contents)

        time.time = MagicMock(return_value=time.time() + self.EXPIRATION_TIME_S + 1)
        self.ALARM_QUEUE.fire_alarms()
        self.assertFalse(test_item in self.sut.contents)

    def test_cleanup__not_existing_item(self):
        test_item = "dummy_text"
        self.sut.add(test_item)
        self.assertTrue(test_item in self.sut.contents)

        self.sut.contents.remove(test_item)
        self.assertFalse(test_item in self.sut.contents)

        time.time = MagicMock(return_value=time.time() + self.EXPIRATION_TIME_S + 1)
        self.ALARM_QUEUE.fire_alarms()
        self.assertFalse(test_item in self.sut.contents)

    def test_get_recent(self):
        for i in range(5):
            self.sut.add(i)

        self.assertEqual([4, 3, 2], self.sut.get_recent_items(3))
        self.assertEqual([4, 3, 2, 1, 0], self.sut.get_recent_items(6))
        self.assertEqual([4], self.sut.get_recent_items(1))
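
Note that these tests reassign time.time globally, which can leak into later tests. A sketch of the same clock-advance trick using unittest.mock.patch, which restores the real time.time automatically:

import time
from unittest import mock

real_time = time.time()
with mock.patch("time.time", return_value=real_time + 2):
    # inside the block, anything calling time.time() sees the advanced clock
    assert time.time() == real_time + 2
# outside the block, time.time is restored automatically
assert time.time() < real_time + 2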
Example #4
class EthNewBlockFeed(Feed[EthBlockFeedEntry, EthRawBlock]):
    NAME = "newBlocks"
    FIELDS = ["hash", "header", "transactions", "uncles"]
    ALL_FIELDS = FIELDS
    VALID_SOURCES = {
        FeedSource.BLOCKCHAIN_SOCKET, FeedSource.BLOCKCHAIN_RPC,
        FeedSource.BDN_SOCKET, FeedSource.BDN_INTERNAL
    }
    published_blocks: ExpiringSet[Sha256Hash]
    published_blocks_height: ExpiringSet[int]

    def __init__(
        self,
        node: "EthGatewayNode",
        network_num: int = constants.ALL_NETWORK_NUM,
    ) -> None:
        super().__init__(self.NAME, network_num=network_num)
        self.last_block_number = 0
        self.hash_for_last_block_number = set()
        self.node = node
        self.published_blocks = ExpiringSet(
            node.alarm_queue,
            gateway_constants.MAX_BLOCK_CACHE_TIME_S,
            name="published_blocks")
        self.published_blocks_height = ExpiringSet(
            node.alarm_queue,
            gateway_constants.MAX_BLOCK_CACHE_TIME_S,
            name="published_blocks_height")

    def serialize(self, raw_message: EthRawBlock) -> EthBlockFeedEntry:
        block_message = raw_message.block
        assert block_message is not None
        return EthBlockFeedEntry(raw_message.block_hash, block_message)

    def publish_blocks_from_queue(self, start_block_height,
                                  end_block_height) -> Set[int]:
        missing_blocks = set()
        block_queuing_service = cast(
            EthBlockQueuingService,
            self.node.block_queuing_service_manager.
            get_designated_block_queuing_service())
        if block_queuing_service is None:
            return missing_blocks

        for block_number in range(start_block_height, end_block_height):
            block_hash = block_queuing_service.accepted_block_hash_at_height.contents.get(
                block_number)
            if block_hash:
                self.publish(
                    EthRawBlock(
                        block_number, block_hash, FeedSource.BDN_INTERNAL,
                        self.node._get_block_message_lazy(None, block_hash)))
            else:
                missing_blocks.add(block_number)
        return missing_blocks

    def publish(self, raw_message: EthRawBlock) -> None:
        logger.trace("attempting to publish message: {} for feed {}",
                     raw_message, self.name)
        if raw_message.source not in self.VALID_SOURCES:
            return
        if self.subscriber_count() == 0:
            return

        block_hash = raw_message.block_hash
        block_number = raw_message.block_number

        if block_number < self.last_block_number - gateway_constants.MAX_BLOCK_BACKLOG_TO_PUBLISH:
            # published block is too far behind, ignore
            return

        if block_hash in self.published_blocks:
            # already published, ignore
            return

        if raw_message.block is None:
            block_queuing_service = cast(
                EthBlockQueuingService,
                self.node.block_queuing_service_manager.
                get_designated_block_queuing_service())
            best_accepted_height, _ = block_queuing_service.best_accepted_block

            logger.warning(
                "{} Feed Failed to recover block for message: {},"
                "last_block_published {} last block in queueing service {}",
                self.name, raw_message, self.last_block_number,
                best_accepted_height)
            return

        self.published_blocks.add(block_hash)
        self.published_blocks_height.add(block_number)

        if self.last_block_number and block_number > self.last_block_number + 1:
            # try to publish all intermediate blocks first
            missing_blocks = self.publish_blocks_from_queue(
                self.last_block_number + 1, block_number - 1)
            if missing_blocks:
                logger.info(
                    "Attempting to publish to feed block: {}, missing previous blocks {} ",
                    block_number, missing_blocks)

        logger.debug("{} Processing new block message: {}", self.name,
                     raw_message)
        super(EthNewBlockFeed, self).publish(raw_message)

        if block_number in self.published_blocks_height and block_number <= self.last_block_number:
            # possible fork, try to republish all later blocks
            _missing_blocks = self.publish_blocks_from_queue(
                block_number + 1, self.last_block_number)

        if block_number > self.last_block_number:
            self.last_block_number = block_number
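
A compact sketch of the gap-filling bookkeeping in EthNewBlockFeed.publish: when a block arrives more than one height ahead, intermediate heights are first looked up in the queuing service, and any height not found is reported as missing. The queued_hashes dict below is an illustrative stand-in for accepted_block_hash_at_height; the exclusive range end mirrors publish_blocks_from_queue above.

from typing import Dict, Set

queued_hashes: Dict[int, str] = {101: "0xaa", 103: "0xcc"}  # height -> hash
last_block_number = 100


def publish_blocks_from_queue(start: int, end: int) -> Set[int]:
    missing: Set[int] = set()
    for height in range(start, end):
        block_hash = queued_hashes.get(height)
        if block_hash:
            print("publishing queued block", height, block_hash)
        else:
            missing.add(height)
    return missing


# block 105 arrives: try the intermediate heights first, as in the feed above
new_block_number = 105
if new_block_number > last_block_number + 1:
    print("missing:", publish_blocks_from_queue(last_block_number + 1,
                                                new_block_number - 1))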
Example #5
class EthTransactionReceiptsFeed(Feed[TransactionReceiptsFeedEntry, Union[EthRawBlock, Dict]]):
    NAME = rpc_constants.ETH_TRANSACTION_RECEIPTS_FEED_NAME
    FIELDS = [
        "receipt",
        "receipt.block_hash",
        "receipt.block_number",
        "receipt.contract_address",
        "receipt.cumulative_gas_used",
        "receipt.from",
        "receipt.gas_used",
        "receipt.logs",
        "receipt.logs_bloom",
        "receipt.status",
        "receipt.to",
        "receipt.transaction_hash",
        "receipt.transaction_index"
    ]
    ALL_FIELDS = ["receipt"]
    VALID_SOURCES = {
        FeedSource.BLOCKCHAIN_SOCKET, FeedSource.BLOCKCHAIN_RPC, FeedSource.BDN_SOCKET
    }

    def __init__(self, node: "EthGatewayNode", network_num: int = constants.ALL_NETWORK_NUM, ) -> None:
        super().__init__(self.NAME, network_num)
        self.node = node
        self.last_block_number = 0
        self.published_blocks = ExpiringSet(
            node.alarm_queue, gateway_constants.MAX_BLOCK_CACHE_TIME_S, name="receipts_feed_published_blocks"
        )
        self.published_blocks_height = ExpiringSet(
            node.alarm_queue, gateway_constants.MAX_BLOCK_CACHE_TIME_S, name="receipts_feed_published_blocks_height"
        )
        self.blocks_confirmed_by_new_heads_notification = ExpiringSet(
            node.alarm_queue, gateway_constants.MAX_BLOCK_CACHE_TIME_S, name="receipts_feed_newHeads_confirmed_blocks"
        )

    def serialize(self, raw_message: Union[EthRawBlock, Dict]) -> TransactionReceiptsFeedEntry:
        # only receipts are serialized for publishing
        if isinstance(raw_message, Dict):
            return TransactionReceiptsFeedEntry(raw_message["result"])
        else:
            raise NotImplementedError

    def publish(self, raw_message: Union[EthRawBlock, Dict]) -> None:
        logger.trace(
            "attempting to publish message: {} for feed {}", raw_message, self.name
        )
        if isinstance(raw_message, Dict):
            # transaction receipts are published via the parent publish method
            raise NotImplementedError
        if raw_message.source not in self.VALID_SOURCES or self.subscriber_count() == 0:
            return

        block_hash = raw_message.block_hash
        block_number = raw_message.block_number
        block = raw_message.block

        # receipts won't be available until NewHeads feed notification
        if raw_message.source in [FeedSource.BLOCKCHAIN_RPC, FeedSource.BLOCKCHAIN_SOCKET]:
            self.blocks_confirmed_by_new_heads_notification.add(block_hash)

        if block_number < self.last_block_number - gateway_constants.MAX_BLOCK_BACKLOG_TO_PUBLISH:
            # published block is too far behind, ignore
            return

        if block_hash in self.published_blocks:
            # already published, ignore
            return

        if block is None:
            block = cast(InternalEthBlockInfo, self.node.block_queuing_service_manager.get_block_data(block_hash))
            if block is None:
                return

        assert block is not None

        if block_hash not in self.blocks_confirmed_by_new_heads_notification:
            return

        if raw_message.source == FeedSource.BDN_SOCKET:
            block = block.to_new_block_msg()

        self.published_blocks.add(block_hash)
        self.published_blocks_height.add(block_number)

        if self.last_block_number and block_number > self.last_block_number + 1:
            # try to publish all intermediate blocks first
            missing_blocks = self._publish_blocks_from_queue(self.last_block_number + 1, block_number - 1)
            if missing_blocks:
                logger.info(
                    "Attempting to publish to feed block: {}, missing previous blocks {} ", block_number, missing_blocks
                )

        logger.debug("{} Attempting to fetch transaction receipts for block {}", self.name, block_hash)
        block_hash_str = block_hash.to_string(True)
        for tx in block.txns():
            asyncio.create_task(self._publish(tx.hash().to_string(True), block_hash_str))

        if block_number in self.published_blocks_height and block_number <= self.last_block_number:
            # possible fork, try to republish all later blocks
            _missing_blocks = self._publish_blocks_from_queue(block_number + 1, self.last_block_number)

        if block_number > self.last_block_number:
            self.last_block_number = block_number

    async def _publish(
        self,
        transaction_hash: str,
        block_hash: str,
        retry_count: int = 0
    ) -> None:
        response = None
        try:
            response = await self.node.eth_ws_proxy_publisher.call_rpc(
                rpc_constants.ETH_GET_TRANSACTION_RECEIPT_RPC_METHOD, [transaction_hash],
            )
        except RpcError as e:
            error_response = e.to_json()
            logger.warning(
                "Failed to fetch transaction receipt for {} in block {}: {}. Ceasing attempts.",
                transaction_hash, block_hash, error_response
            )
            return

        assert response is not None

        if response.result is None:
            if retry_count == 0 or retry_count == RETRIES_MAX_ATTEMPTS:
                logger.debug(
                    "Failed to fetch transaction receipt for tx {} in block {}: not found. "
                    "Attempt: {}. Max attempts: {}.",
                    transaction_hash, block_hash, retry_count + 1, RETRIES_MAX_ATTEMPTS + 1
                )
            if retry_count < RETRIES_MAX_ATTEMPTS:
                sleep_time = utils.fibonacci(retry_count + 1) * 0.1
                await asyncio.sleep(sleep_time)
                asyncio.create_task(self._publish(transaction_hash, block_hash, retry_count + 1))
            return

        response.result = humps.decamelize(response.result)
        json_response = response.to_json()
        if json_response["result"]["block_hash"] != block_hash:
            return
        super().publish(json_response)

        if retry_count > 0:
            logger.debug(
                "Succeeded in fetching receipt for tx {} in block {} after {} attempts.",
                transaction_hash, block_hash, retry_count
            )

    def _publish_blocks_from_queue(self, start_block_height, end_block_height) -> Set[int]:
        missing_blocks = set()
        block_queuing_service = cast(
            EthBlockQueuingService,
            self.node.block_queuing_service_manager.get_designated_block_queuing_service()
        )
        if block_queuing_service is None:
            return missing_blocks

        for block_number in range(start_block_height, end_block_height):
            block_hash = block_queuing_service.accepted_block_hash_at_height.contents.get(block_number)
            if block_hash:
                block = self.node.block_queuing_service_manager.get_block_data(block_hash)
                if block is not None:
                    block_hash_str = block_hash.to_string(True)
                    for tx in block.txns():
                        asyncio.create_task(self._publish(tx.hash().to_string(True), block_hash_str))
            else:
                missing_blocks.add(block_number)
        return missing_blocks
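
_publish above retries a missing receipt on a Fibonacci backoff schedule (utils.fibonacci(retry_count + 1) * 0.1 seconds). A standalone sketch of that schedule, with a placeholder in place of the eth_getTransactionReceipt call and a loop instead of the recursive task creation; the fibonacci indexing here is an assumption:

import asyncio


def fibonacci(n: int) -> int:
    # 1, 1, 2, 3, 5, 8, ... (assumed to match utils.fibonacci's indexing)
    a, b = 1, 1
    for _ in range(n - 1):
        a, b = b, a + b
    return a


async def fetch_with_backoff(max_attempts: int = 5) -> None:
    for attempt in range(max_attempts + 1):
        result = None  # hypothetical: await fetch_receipt(tx_hash)
        if result is not None:
            return
        if attempt < max_attempts:
            # 0.1s, 0.1s, 0.2s, 0.3s, 0.5s between attempts
            await asyncio.sleep(fibonacci(attempt + 1) * 0.1)
    print("gave up after", max_attempts + 1, "attempts")


asyncio.run(fetch_with_backoff())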
Example #6
class EthPendingTransactionFeed(Feed[EthTransactionFeedEntry,
                                     EthRawTransaction]):
    NAME = rpc_constants.ETH_PENDING_TRANSACTION_FEED_NAME
    FIELDS = [
        "tx_hash",
        "tx_contents",
        "tx_contents.nonce",
        "tx_contents.gas_price",
        "tx_contents.gas",
        "tx_contents.to",
        "tx_contents.value",
        "tx_contents.input",
        "tx_contents.v",
        "tx_contents.r",
        "tx_contents.s",
        "tx_contents.from",
        "local_region",
    ]
    FILTERS = {"value", "from", "to"}
    ALL_FIELDS = ["tx_hash", "tx_contents", "local_region"]

    published_transactions: ExpiringSet[Sha256Hash]

    def __init__(
        self,
        alarm_queue: AlarmQueue,
        network_num: int = constants.ALL_NETWORK_NUM,
    ) -> None:
        super().__init__(self.NAME, network_num=network_num)

        # enforce uniqueness, since multiple sources can publish to
        # pending transactions (eth ws + remote)
        self.published_transactions = ExpiringSet(alarm_queue,
                                                  EXPIRATION_TIME_S,
                                                  "pendingTxs")

    def subscribe(
            self, options: Dict[str,
                                Any]) -> Subscriber[EthTransactionFeedEntry]:
        duplicates = options.get("duplicates", None)
        if duplicates is not None:
            if not isinstance(duplicates, bool):
                raise RpcInvalidParams('"duplicates" must be a boolean')

        return super().subscribe(options)

    def publish(self, raw_message: EthRawTransaction) -> None:
        if (raw_message.tx_hash in self.published_transactions
                and not self.any_subscribers_want_duplicates()):
            return

        super().publish(raw_message)

        self.published_transactions.add(raw_message.tx_hash)

    def serialize(self,
                  raw_message: EthRawTransaction) -> EthTransactionFeedEntry:
        return EthTransactionFeedEntry(raw_message.tx_hash,
                                       raw_message.tx_contents,
                                       raw_message.local_region)

    def any_subscribers_want_duplicates(self) -> bool:
        for subscriber in self.subscribers.values():
            if subscriber.options.get("duplicates", False):
                return True
        return False

    def should_publish_message_to_subscriber(
        self,
        subscriber: Subscriber[EthTransactionFeedEntry],
        raw_message: EthRawTransaction,
        serialized_message: EthTransactionFeedEntry,
    ) -> bool:
        if (raw_message.tx_hash in self.published_transactions
                and not subscriber.options.get("duplicates", False)):
            return False
        should_publish = True
        if subscriber.filters:
            logger_filters.trace(
                "checking if should publish to {} with filters {}",
                subscriber.subscription_id,
                subscriber.filters,
            )
            contents = serialized_message.tx_contents
            state = {
                "value":
                eth_filter_handlers.reformat_tx_value(contents["value"]),
                "to": eth_filter_handlers.reformat_address(contents["to"]),
                "from": eth_filter_handlers.reformat_address(contents["from"]),
            }
            should_publish = subscriber.validator(state)
            logger_filters.trace("should publish: {}", should_publish)
        return should_publish
Example #7
class EthPendingTransactionFeed(Feed[EthTransactionFeedEntry,
                                     EthRawTransaction]):
    NAME = rpc_constants.ETH_PENDING_TRANSACTION_FEED_NAME
    FIELDS = ["tx_hash", "tx_contents"]
    FILTERS = {"transaction_value_range_eth", "from", "to"}

    published_transactions: ExpiringSet[Sha256Hash]

    def __init__(self, alarm_queue: AlarmQueue) -> None:
        super().__init__(self.NAME)

        # enforce uniqueness, since multiple sources can publish to
        # pending transactions (eth ws + remote)
        self.published_transactions = ExpiringSet(alarm_queue,
                                                  EXPIRATION_TIME_S,
                                                  "pendingTxs")

    def subscribe(
            self, options: Dict[str,
                                Any]) -> Subscriber[EthTransactionFeedEntry]:
        duplicates = options.get("duplicates", None)
        if duplicates is not None:
            if not isinstance(duplicates, bool):
                raise RpcInvalidParams('"duplicates" must be a boolean')

        return super().subscribe(options)

    def publish(self, raw_message: EthRawTransaction) -> None:
        if (raw_message.tx_hash in self.published_transactions
                and not self.any_subscribers_want_duplicates()):
            return

        super().publish(raw_message)

        self.published_transactions.add(raw_message.tx_hash)

    def serialize(self,
                  raw_message: EthRawTransaction) -> EthTransactionFeedEntry:
        return EthTransactionFeedEntry(raw_message.tx_hash,
                                       raw_message.tx_contents)

    def any_subscribers_want_duplicates(self) -> bool:
        for subscriber in self.subscribers.values():
            if subscriber.options.get("duplicates", False):
                return True
        return False

    def should_publish_message_to_subscriber(
        self,
        subscriber: Subscriber[EthTransactionFeedEntry],
        raw_message: EthRawTransaction,
        serialized_message: EthTransactionFeedEntry,
    ) -> bool:
        if (raw_message.tx_hash in self.published_transactions
                and not subscriber.options.get("duplicates", False)):
            return False
        should_publish = True
        if subscriber.filters:
            logger_filters.trace(
                "checking if should publish to {} with filters {}",
                subscriber.subscription_id,
                subscriber.filters,
            )
            should_publish = filter_dsl.handle(
                subscriber.filters,
                eth_filter_handlers.handle_filter,
                serialized_message,
            )
        logger_filters.trace("should publish: {}", should_publish)
        return should_publish

    def reformat_filters(self, filters: Dict[str, Any]) -> Dict[str, Any]:
        return filter_dsl.reformat(filters,
                                   eth_filter_handlers.reformat_filter)
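
should_publish_message_to_subscriber above delegates filter matching to filter_dsl.handle and eth_filter_handlers. A rough sketch of the underlying idea with an invented predicate-per-field filter representation (not the actual filter DSL): publish only when every filter accepts the serialized entry.

from typing import Any, Callable, Dict

# invented filter representation: field name -> predicate over the entry's field
Filters = Dict[str, Callable[[Any], bool]]


def handle_filters(filters: Filters, entry: Dict[str, Any]) -> bool:
    # publish only if every filter predicate accepts its field of the entry
    return all(pred(entry.get(field)) for field, pred in filters.items())


entry = {"to": "0xdeadbeef", "value": 2 * 10 ** 18}
filters: Filters = {
    "to": lambda v: v == "0xdeadbeef",
    "value": lambda v: v is not None and v >= 10 ** 18,  # e.g. >= 1 ETH in wei
}
print(handle_filters(filters, entry))  # True -> should publish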