Example #1
    def __init__(  # pylint: disable=too-many-arguments
        self,
        web3: Web3,
        contracts: Dict[str, Contract],
        private_key: PrivateKey,
        db_filename: str,
        sync_start_block: BlockNumber,
        required_confirmations: BlockTimeout,
        poll_interval: float,
        matrix_servers: Optional[List[str]] = None,
    ):
        super().__init__()

        self.web3 = web3
        self.registry_address = contracts[
            CONTRACT_TOKEN_NETWORK_REGISTRY].address
        self.user_deposit_contract = contracts[CONTRACT_USER_DEPOSIT]
        self.service_token_address = (
            self.user_deposit_contract.functions.token().call()
        )
        self.chain_id = ChainID(web3.eth.chainId)
        self.address = private_key_to_address(private_key)
        self.required_confirmations = required_confirmations
        self._poll_interval = poll_interval
        self._is_running = gevent.event.Event()

        log.info("PFS payment address", address=self.address)

        self.database = PFSDatabase(
            filename=db_filename,
            pfs_address=self.address,
            sync_start_block=sync_start_block,
            token_network_registry_address=to_canonical_address(
                self.registry_address),
            chain_id=self.chain_id,
            user_deposit_contract_address=to_canonical_address(
                self.user_deposit_contract.address),
            allow_create=True,
        )

        self.blockchain_state = BlockchainState(
            latest_committed_block=self.database.get_latest_committed_block(),
            token_network_registry_address=to_canonical_address(
                self.registry_address),
            chain_id=self.chain_id,
        )

        self.matrix_listener = MatrixListener(
            private_key=private_key,
            chain_id=self.chain_id,
            device_id=DeviceIDs.PFS,
            message_received_callback=self.handle_message,
            servers=matrix_servers,
        )

        self.token_networks = self._load_token_networks()
        self.updated = gevent.event.Event()  # set whenever blocks are processed
        self.startup_finished = gevent.event.AsyncResult()

        self._init_metrics()
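
For orientation, the constructor shown in Example #1 (and again in Example #11) could be wired up roughly as follows. This is a hypothetical sketch, not part of the source: contract_manager, registry_address, user_deposit_address and private_key are assumed to exist, and the raiden-services names (PathfindingService, CONTRACT_TOKEN_NETWORK_REGISTRY, CONTRACT_USER_DEPOSIT, BlockNumber, BlockTimeout) are assumed to be imported as in the listings.

from web3 import Web3

# Hypothetical wiring sketch; names marked "assumed" are not defined in the listings.
web3 = Web3(Web3.HTTPProvider("http://localhost:8545"))  # assumed RPC endpoint
contracts = {
    CONTRACT_TOKEN_NETWORK_REGISTRY: web3.eth.contract(
        abi=contract_manager.get_contract_abi(CONTRACT_TOKEN_NETWORK_REGISTRY),
        address=registry_address,  # assumed to be known
    ),
    CONTRACT_USER_DEPOSIT: web3.eth.contract(
        abi=contract_manager.get_contract_abi(CONTRACT_USER_DEPOSIT),
        address=user_deposit_address,  # assumed to be known
    ),
}
service = PathfindingService(
    web3=web3,
    contracts=contracts,
    private_key=private_key,  # assumed PrivateKey
    db_filename="pfs.db",
    sync_start_block=BlockNumber(0),
    required_confirmations=BlockTimeout(5),
    poll_interval=10.0,
)
service.start()  # PathfindingService is a gevent Greenlet (Examples #10, #11)
service.join()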
Example #2
    def __init__(  # pylint: disable=too-many-arguments
        self,
        web3: Web3,
        contracts: Dict[str, Contract],
        private_key: str,
        db_filename: str,
        sync_start_block: BlockNumber = BlockNumber(0),
        required_confirmations: int = 8,
        poll_interval: float = 10,
        matrix_servers: List[str] = None,
    ):
        super().__init__()

        self.web3 = web3
        self.registry_address = contracts[
            CONTRACT_TOKEN_NETWORK_REGISTRY].address
        self.user_deposit_contract = contracts[CONTRACT_USER_DEPOSIT]
        self.chain_id = ChainID(int(web3.net.version))
        self.address = private_key_to_address(private_key)
        self.required_confirmations = required_confirmations
        self._poll_interval = poll_interval
        self._is_running = gevent.event.Event()

        log.info("PFS payment address", address=self.address)

        self.blockchain_state = BlockchainState(
            latest_commited_block=BlockNumber(0),
            token_network_registry_address=self.registry_address,
            chain_id=self.chain_id,
        )

        self.database = PFSDatabase(
            filename=db_filename,
            pfs_address=self.address,
            sync_start_block=sync_start_block,
            token_network_registry_address=self.registry_address,
            chain_id=self.chain_id,
            user_deposit_contract_address=self.user_deposit_contract.address,
            allow_create=True,
        )

        self.matrix_listener = MatrixListener(
            private_key=private_key,
            chain_id=self.chain_id,
            service_room_suffix=PATH_FINDING_BROADCASTING_ROOM,
            message_received_callback=self.handle_message,
            address_reachability_changed_callback=self.handle_reachability_change,
            servers=matrix_servers,
        )

        self.address_to_reachability: Dict[Address,
                                           AddressReachability] = dict()
        self.token_networks = self._load_token_networks()
Example #3
    def __init__(
        self,
        web3: Web3,
        contracts: Dict[str, Contract],
        private_key: str,
        db_filename: str,
        sync_start_block: BlockNumber = BlockNumber(0),
        required_confirmations: int = 8,
        poll_interval: float = 10,
        service_fee: int = 0,
    ):
        super().__init__()

        self.web3 = web3
        self.registry_address = contracts[CONTRACT_TOKEN_NETWORK_REGISTRY].address
        self.sync_start_block = sync_start_block
        self.required_confirmations = required_confirmations
        self.poll_interval = poll_interval
        self.chain_id = ChainID(int(web3.net.version))
        self.private_key = private_key
        self.address = private_key_to_address(private_key)
        self.service_fee = service_fee

        self.is_running = gevent.event.Event()
        self.token_networks: Dict[TokenNetworkAddress, TokenNetwork] = {}
        self.database = PFSDatabase(filename=db_filename, pfs_address=self.address)
        self.user_deposit_contract = contracts[CONTRACT_USER_DEPOSIT]

        self.last_known_block = 0
        self.blockchain_state = BlockchainState(
            chain_id=self.chain_id,
            token_network_registry_address=self.registry_address,
            monitor_contract_address=Address(''),  # FIXME
            latest_known_block=self.sync_start_block,
            token_network_addresses=[],
        )
        log.info(
            'Listening to token network registry',
            registry_address=self.registry_address,
            start_block=sync_start_block,
        )

        try:
            self.matrix_listener = MatrixListener(
                private_key=private_key,
                chain_id=self.chain_id,
                callback=self.handle_message,
                service_room_suffix=PATH_FINDING_BROADCASTING_ROOM,
            )
        except ConnectionError as e:
            log.critical('Could not connect to broadcasting system.', exc=e)
            sys.exit(1)
Example #4
    def __init__(self, private_key: str, state_db: SharedDatabase):
        super().__init__()

        self.private_key = private_key
        self.state_db = state_db

        state = self.state_db.load_state()
        self.chain_id = state.blockchain_state.chain_id
        self.matrix_listener = MatrixListener(
            private_key=private_key,
            chain_id=self.chain_id,
            service_room_suffix=MONITORING_BROADCASTING_ROOM,
            message_received_callback=self.handle_message,
        )
Example #5
    def __init__(self, private_key: str, state_db: SharedDatabase):
        super().__init__()

        self.private_key = private_key
        self.state_db = state_db

        state = self.state_db.load_state()
        self.chain_id = state.blockchain_state.chain_id
        try:
            self.matrix_listener = MatrixListener(
                private_key=private_key,
                chain_id=self.chain_id,
                callback=self.handle_message,
                service_room_suffix=MONITORING_BROADCASTING_ROOM,
            )
        except ConnectionError as e:
            log.critical('Could not connect to broadcasting system.', exc=e)
            sys.exit(1)
Example #6
def test_matrix_lister_smoke_test(get_accounts, get_private_key):
    (c1, ) = get_accounts(1)
    url = "http://example.com"
    client_mock = Mock()
    client_mock.api.base_url = url
    client_mock.user_id = "1"
    with patch.multiple("raiden_libs.matrix",
                        make_client=Mock(return_value=client_mock)):
        listener = MatrixListener(
            private_key=get_private_key(c1),
            chain_id=ChainID(1),
            service_room_suffix="_service",
            message_received_callback=lambda _: None,
            address_reachability_changed_callback=lambda _addr, _reachability: None,
        )
        listener._start_client()  # pylint: disable=protected-access

    assert listener.startup_finished.is_set()
Example #7
def test_matrix_listener_smoke_test(get_accounts, get_private_key):
    (c1, ) = get_accounts(1)
    client_mock = Mock()
    client_mock.api.base_url = "http://example.com"
    client_mock.user_id = "1"

    with patch.multiple(
            "raiden_libs.matrix",
            make_client=Mock(return_value=client_mock),
    ):
        listener = MatrixListener(
            private_key=get_private_key(c1),
            chain_id=ChainID(61),
            device_id=DeviceIDs.PFS,
            message_received_callback=lambda _: None,
        )
        listener._run()  # pylint: disable=protected-access

    assert listener.startup_finished.done()
Example #8
    def __init__(  # pylint: disable=too-many-arguments
        self,
        web3: Web3,
        contracts: Dict[str, Contract],
        private_key: str,
        db_filename: str,
        sync_start_block: BlockNumber = BlockNumber(0),
        required_confirmations: int = 8,
        poll_interval: float = 10,
    ):
        super().__init__()

        self.web3 = web3
        self.registry_address = contracts[CONTRACT_TOKEN_NETWORK_REGISTRY].address
        self.user_deposit_contract = contracts[CONTRACT_USER_DEPOSIT]
        self.chain_id = ChainID(int(web3.net.version))
        self.address = private_key_to_address(private_key)
        self._required_confirmations = required_confirmations
        self._poll_interval = poll_interval
        self._is_running = gevent.event.Event()

        self.database = PFSDatabase(
            filename=db_filename,
            pfs_address=self.address,
            sync_start_block=sync_start_block,
            token_network_registry_address=self.registry_address,
            chain_id=self.chain_id,
            user_deposit_contract_address=self.user_deposit_contract.address,
            allow_create=True,
        )
        self.token_networks = self._load_token_networks()

        try:
            self.matrix_listener = MatrixListener(
                private_key=private_key,
                chain_id=self.chain_id,
                callback=self.handle_message,
                service_room_suffix=PATH_FINDING_BROADCASTING_ROOM,
            )
        except ConnectionError as exc:
            log.critical("Could not connect to broadcasting system.", exc=exc)
            sys.exit(1)
Example #9
    def __init__(
        self,
        private_key: PrivateKey,
        state_db: SharedDatabase,
        matrix_servers: Optional[List[str]] = None,
    ):
        super().__init__()

        self.private_key = private_key
        self.state_db = state_db

        state = self.state_db.load_state()
        self.chain_id = state.blockchain_state.chain_id
        self.matrix_listener = MatrixListener(
            private_key=private_key,
            chain_id=self.chain_id,
            device_id=DeviceIDs.MS,
            message_received_callback=self.handle_message,
            servers=matrix_servers,
        )
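
Examples #4, #5 and #9 all build the MatrixListener inside a service constructor. A minimal standalone sketch using the newer keyword signature from Examples #1, #9 and #11 might look like this (hypothetical; private_key is assumed to exist, and the listener is started and awaited the same way Example #11 does it):

def on_message(message):
    # message_received_callback is invoked for every broadcast message
    print("received", message)

listener = MatrixListener(
    private_key=private_key,  # assumed PrivateKey
    chain_id=ChainID(1),
    device_id=DeviceIDs.MS,
    message_received_callback=on_message,
    servers=None,  # let the listener choose Matrix servers
)
listener.start()  # started like a Greenlet, as in Example #11
listener.startup_finished.get(timeout=30)  # wait until the client is ready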
Example #10
class PathfindingService(gevent.Greenlet):
    # pylint: disable=too-many-instance-attributes
    def __init__(  # pylint: disable=too-many-arguments
        self,
        web3: Web3,
        contracts: Dict[str, Contract],
        private_key: str,
        db_filename: str,
        sync_start_block: BlockNumber = BlockNumber(0),
        required_confirmations: int = 8,
        poll_interval: float = 10,
    ):
        super().__init__()

        self.web3 = web3
        self.registry_address = contracts[
            CONTRACT_TOKEN_NETWORK_REGISTRY].address
        self.user_deposit_contract = contracts[CONTRACT_USER_DEPOSIT]
        self.chain_id = ChainID(int(web3.net.version))
        self.address = private_key_to_address(private_key)
        self._required_confirmations = required_confirmations
        self._poll_interval = poll_interval
        self._is_running = gevent.event.Event()

        log.info("PFS payment address", address=self.address)

        self.blockchain_state = BlockchainState(
            latest_known_block=BlockNumber(0),
            token_network_registry_address=self.registry_address,
            chain_id=self.chain_id,
        )

        self.database = PFSDatabase(
            filename=db_filename,
            pfs_address=self.address,
            sync_start_block=sync_start_block,
            token_network_registry_address=self.registry_address,
            chain_id=self.chain_id,
            user_deposit_contract_address=self.user_deposit_contract.address,
            allow_create=True,
        )

        self.matrix_listener = MatrixListener(
            private_key=private_key,
            chain_id=self.chain_id,
            service_room_suffix=PATH_FINDING_BROADCASTING_ROOM,
            message_received_callback=self.handle_message,
            address_reachability_changed_callback=self.handle_reachability_change,
        )

        self.address_to_reachability: Dict[Address,
                                           AddressReachability] = dict()
        self.token_networks = self._load_token_networks()

    def _load_token_networks(self) -> Dict[TokenNetworkAddress, TokenNetwork]:
        network_for_address = {
            n.address: n
            for n in self.database.get_token_networks()
        }
        channel_views = self.database.get_channel_views()
        for cv in channel_views:
            network_for_address[cv.token_network_address].add_channel_view(cv)

            # Register channel participants for presence tracking
            self.matrix_listener.follow_address_presence(cv.participant1)
            self.matrix_listener.follow_address_presence(cv.participant2)

        return network_for_address

    def _run(self) -> None:  # pylint: disable=method-hidden
        register_error_handler()
        try:
            self.matrix_listener.start()
        except ConnectionError as exc:
            log.critical("Could not connect to broadcasting system.", exc=exc)
            sys.exit(1)

        log.info(
            "Listening to token network registry",
            registry_address=self.registry_address,
            start_block=self.database.get_latest_known_block(),
        )
        while not self._is_running.is_set():
            last_confirmed_block = self.web3.eth.blockNumber - self._required_confirmations

            max_query_interval_end_block = (
                self.database.get_latest_known_block() + MAX_FILTER_INTERVAL)
            # Limit the max number of blocks that is processed per iteration
            last_block = min(last_confirmed_block,
                             max_query_interval_end_block)

            self._process_new_blocks(last_block)

            try:
                gevent.sleep(self._poll_interval)
            except KeyboardInterrupt:
                log.info("Shutting down")
                sys.exit(0)

    def _process_new_blocks(self, last_block: BlockNumber) -> None:
        self.blockchain_state.latest_known_block = self.database.get_latest_known_block()
        self.blockchain_state.token_network_addresses = list(
            self.token_networks.keys())

        _, events = get_blockchain_events(
            web3=self.web3,
            contract_manager=CONTRACT_MANAGER,
            chain_state=self.blockchain_state,
            to_block=last_block,
        )
        for event in events:
            self.handle_event(event)

    def stop(self) -> None:
        self.matrix_listener.stop()
        self._is_running.set()
        self.matrix_listener.join()

    def follows_token_network(
            self, token_network_address: TokenNetworkAddress) -> bool:
        """ Checks if a token network is followed by the pathfinding service. """
        return token_network_address in self.token_networks.keys()

    def handle_reachability_change(self, address: Address,
                                   reachability: AddressReachability) -> None:
        self.address_to_reachability[address] = reachability

    def get_token_network(
            self, token_network_address: TokenNetworkAddress
    ) -> Optional[TokenNetwork]:
        """ Returns the `TokenNetwork` for the given address or `None` for unknown networks. """
        return self.token_networks.get(token_network_address)

    def handle_event(self, event: Event) -> None:
        if isinstance(event, ReceiveTokenNetworkCreatedEvent):
            self.handle_token_network_created(event)
        elif isinstance(event, ReceiveChannelOpenedEvent):
            self.handle_channel_opened(event)
        elif isinstance(event, ReceiveChannelNewDepositEvent):
            self.handle_channel_new_deposit(event)
        elif isinstance(event, ReceiveChannelClosedEvent):
            self.handle_channel_closed(event)
        elif isinstance(event, UpdatedHeadBlockEvent):
            self.database.update_lastest_known_block(event.head_block_number)
        else:
            log.debug("Unhandled event", evt=event)

    def handle_token_network_created(
            self, event: ReceiveTokenNetworkCreatedEvent) -> None:
        network_address = TokenNetworkAddress(event.token_network_address)
        if not self.follows_token_network(network_address):
            log.info("Found new token network", event_=event)

            self.token_networks[network_address] = TokenNetwork(
                network_address)
            self.database.upsert_token_network(network_address)

    def handle_channel_opened(self, event: ReceiveChannelOpenedEvent) -> None:
        token_network = self.get_token_network(event.token_network_address)
        if token_network is None:
            return

        log.info("Received ChannelOpened event", event_=event)

        self.matrix_listener.follow_address_presence(event.participant1,
                                                     refresh=True)
        self.matrix_listener.follow_address_presence(event.participant2,
                                                     refresh=True)

        channel_views = token_network.handle_channel_opened_event(
            channel_identifier=event.channel_identifier,
            participant1=event.participant1,
            participant2=event.participant2,
            settle_timeout=event.settle_timeout,
        )
        for cv in channel_views:
            self.database.upsert_channel_view(cv)

        # Handle messages for this channel which were received before ChannelOpened
        with self.database.conn:
            for message in self.database.pop_waiting_messages(
                    token_network_address=token_network.address,
                    channel_id=event.channel_identifier):
                self.handle_message(message)

    def handle_channel_new_deposit(
            self, event: ReceiveChannelNewDepositEvent) -> None:
        token_network = self.get_token_network(event.token_network_address)
        if token_network is None:
            return

        log.info("Received ChannelNewDeposit event", event_=event)

        channel_view = token_network.handle_channel_new_deposit_event(
            channel_identifier=event.channel_identifier,
            receiver=event.participant_address,
            total_deposit=event.total_deposit,
        )
        if channel_view:
            self.database.upsert_channel_view(channel_view)

    def handle_channel_closed(self, event: ReceiveChannelClosedEvent) -> None:
        token_network = self.get_token_network(event.token_network_address)
        if token_network is None:
            return

        log.info("Received ChannelClosed event", event_=event)

        token_network.handle_channel_closed_event(
            channel_identifier=event.channel_identifier)
        self.database.delete_channel_views(event.channel_identifier)

    def handle_message(self, message: Message) -> None:
        try:
            if isinstance(message, PFSCapacityUpdate):
                changed_cvs = self.on_capacity_update(message)
            elif isinstance(message, PFSFeeUpdate):
                changed_cvs = self.on_fee_update(message)
            else:
                log.debug("Ignoring message", message=message)

            for cv in changed_cvs:
                self.database.upsert_channel_view(cv)

        except DeferMessage as ex:
            self.defer_message_until_channel_is_open(ex.deferred_message)
        except InvalidGlobalMessage as ex:
            log.info(str(ex), **asdict(message))

    def defer_message_until_channel_is_open(self,
                                            message: DeferableMessage) -> None:
        log.debug(
            "Received message for unknown channel, defer until ChannelOpened is confirmed",
            channel_id=message.canonical_identifier.channel_identifier,
            message=message,
        )
        self.database.insert_waiting_message(message)

    def on_fee_update(self, message: PFSFeeUpdate) -> List[ChannelView]:
        if message.sender != message.updating_participant:
            raise InvalidPFSFeeUpdate(
                "Invalid sender recovered from signature in PFSFeeUpdate")

        token_network = self.get_token_network(
            message.canonical_identifier.token_network_address)
        if not token_network:
            return []

        log.debug("Received Fee Update", message=message)

        if (message.canonical_identifier.channel_identifier
                not in token_network.channel_id_to_addresses):
            raise DeferMessage(message)

        return token_network.handle_channel_fee_update(message)

    def _validate_pfs_capacity_update(
            self, message: PFSCapacityUpdate) -> TokenNetwork:
        token_network_address = TokenNetworkAddress(
            message.canonical_identifier.token_network_address)

        # check if chain_id matches
        if message.canonical_identifier.chain_identifier != self.chain_id:
            raise InvalidCapacityUpdate(
                "Received Capacity Update with unknown chain identifier")

        # check if token network exists
        token_network = self.get_token_network(token_network_address)
        if token_network is None:
            raise InvalidCapacityUpdate(
                "Received Capacity Update with unknown token network")

        # check values < max int 256
        if message.updating_capacity > UINT256_MAX:
            raise InvalidCapacityUpdate(
                "Received Capacity Update with impossible updating_capacity")
        if message.other_capacity > UINT256_MAX:
            raise InvalidCapacityUpdate(
                "Received Capacity Update with impossible other_capacity")

        # check signature of Capacity Update
        if message.sender != message.updating_participant:
            raise InvalidCapacityUpdate("Capacity Update not signed correctly")

        # check if channel exists
        channel_identifier = message.canonical_identifier.channel_identifier
        if channel_identifier not in token_network.channel_id_to_addresses:
            raise DeferMessage(message)

        # check if participants fit to channel id
        participants = token_network.channel_id_to_addresses[
            channel_identifier]
        if message.updating_participant not in participants:
            raise InvalidCapacityUpdate(
                "Sender of Capacity Update does not match the internal channel"
            )
        if message.other_participant not in participants:
            raise InvalidCapacityUpdate(
                "Other Participant of Capacity Update does not match the internal channel"
            )

        return token_network

    def on_capacity_update(self,
                           message: PFSCapacityUpdate) -> List[ChannelView]:
        token_network = self._validate_pfs_capacity_update(message)
        log.debug("Received Capacity Update", message=message)
        self.database.upsert_capacity_update(message)

        # Follow presence for the channel participants
        self.matrix_listener.follow_address_presence(
            message.updating_participant, refresh=True)
        self.matrix_listener.follow_address_presence(message.other_participant,
                                                     refresh=True)

        updating_capacity_partner, other_capacity_partner = self.database.get_capacity_updates(
            updating_participant=message.other_participant,
            token_network_address=TokenNetworkAddress(
                message.canonical_identifier.token_network_address),
            channel_id=message.canonical_identifier.channel_identifier,
        )
        return token_network.handle_channel_balance_update_message(
            message=message,
            updating_capacity_partner=updating_capacity_partner,
            other_capacity_partner=other_capacity_partner,
        )
Example #11
class PathfindingService(gevent.Greenlet):
    # pylint: disable=too-many-instance-attributes
    def __init__(  # pylint: disable=too-many-arguments
        self,
        web3: Web3,
        contracts: Dict[str, Contract],
        private_key: PrivateKey,
        db_filename: str,
        sync_start_block: BlockNumber,
        required_confirmations: BlockTimeout,
        poll_interval: float,
        matrix_servers: Optional[List[str]] = None,
    ):
        super().__init__()

        self.web3 = web3
        self.registry_address = contracts[
            CONTRACT_TOKEN_NETWORK_REGISTRY].address
        self.user_deposit_contract = contracts[CONTRACT_USER_DEPOSIT]
        self.service_token_address = (
            self.user_deposit_contract.functions.token().call()
        )
        self.chain_id = ChainID(web3.eth.chainId)
        self.address = private_key_to_address(private_key)
        self.required_confirmations = required_confirmations
        self._poll_interval = poll_interval
        self._is_running = gevent.event.Event()

        log.info("PFS payment address", address=self.address)

        self.database = PFSDatabase(
            filename=db_filename,
            pfs_address=self.address,
            sync_start_block=sync_start_block,
            token_network_registry_address=to_canonical_address(
                self.registry_address),
            chain_id=self.chain_id,
            user_deposit_contract_address=to_canonical_address(
                self.user_deposit_contract.address),
            allow_create=True,
        )

        self.blockchain_state = BlockchainState(
            latest_committed_block=self.database.get_latest_committed_block(),
            token_network_registry_address=to_canonical_address(
                self.registry_address),
            chain_id=self.chain_id,
        )

        self.matrix_listener = MatrixListener(
            private_key=private_key,
            chain_id=self.chain_id,
            device_id=DeviceIDs.PFS,
            message_received_callback=self.handle_message,
            servers=matrix_servers,
        )

        self.token_networks = self._load_token_networks()
        self.updated = gevent.event.Event()  # set whenever blocks are processed
        self.startup_finished = gevent.event.AsyncResult()

        self._init_metrics()

    def _init_metrics(self) -> None:
        def _get_number_of_claimed_ious() -> float:
            return float(self.database.get_nof_claimed_ious())

        def _get_total_amount_of_claimed_ious() -> float:
            return float(sum(iou.amount for iou in self._iter_claimed_ious()))

        metrics.get_metrics_for_label(
            metrics.IOU_CLAIMS, metrics.IouStatus.SUCCESSFUL).set_function(
                _get_number_of_claimed_ious)
        metrics.get_metrics_for_label(
            metrics.IOU_CLAIMS_TOKEN,
            metrics.IouStatus.SUCCESSFUL).set_function(
                _get_total_amount_of_claimed_ious)

    def _iter_claimed_ious(self) -> Iterator[IOU]:
        return self.database.get_ious(claimed=True)

    def _load_token_networks(self) -> Dict[TokenNetworkAddress, TokenNetwork]:
        network_for_address = {
            n.address: n
            for n in self.database.get_token_networks()
        }
        for channel in self.database.get_channels():
            for cv in channel.views:
                network_for_address[cv.token_network_address].add_channel_view(
                    cv)

        return network_for_address

    def _run(self) -> None:  # pylint: disable=method-hidden
        try:
            self.matrix_listener.start()
        except (Timeout, ConnectionError) as exc:
            log.critical("Could not connect to broadcasting system.", exc=exc)
            sys.exit(1)

        self.matrix_listener.link(self.startup_finished)
        try:
            self.matrix_listener.startup_finished.get(
                timeout=MATRIX_START_TIMEOUT)
        except Timeout:
            raise Exception("MatrixListener did not start in time.")
        self.startup_finished.set()

        log.info(
            "Listening to token network registry",
            registry_address=self.registry_address,
            start_block=self.database.get_latest_committed_block(),
        )
        while not self._is_running.is_set():
            self._process_new_blocks(
                BlockNumber(self.web3.eth.blockNumber -
                            self.required_confirmations))

            # Let tests waiting for this event know that we're done with processing
            self.updated.set()
            self.updated.clear()

            # Sleep, then collect errors from greenlets
            gevent.sleep(self._poll_interval)
            gevent.joinall({self.matrix_listener}, timeout=0, raise_error=True)

    def _process_new_blocks(self, latest_confirmed_block: BlockNumber) -> None:
        start = time.monotonic()

        db_block = self.database.get_latest_committed_block()
        assert db_block == self.blockchain_state.latest_committed_block, (
            f"Unexpected `latest_committed_block` in db: "
            f"was {db_block}, expected {self.blockchain_state.latest_committed_block}. "
            f"Is the db accidentally shared by two PFSes?")

        events = get_blockchain_events_adaptive(
            web3=self.web3,
            blockchain_state=self.blockchain_state,
            token_network_addresses=list(self.token_networks.keys()),
            latest_confirmed_block=latest_confirmed_block,
        )

        if events is None:
            return

        before_process = time.monotonic()
        for event in events:
            self.handle_event(event)
            gevent.idle()  # Allow answering requests in between events

        if events:
            log.info(
                "Processed events",
                getting=round(before_process - start, 2),
                processing=round(time.monotonic() - before_process, 2),
                total_duration=round(time.monotonic() - start, 2),
                event_counts=collections.Counter(e.__class__.__name__
                                                 for e in events),
            )

    def stop(self) -> None:
        self.matrix_listener.kill()
        self._is_running.set()
        self.matrix_listener.join()

    def follows_token_network(
            self, token_network_address: TokenNetworkAddress) -> bool:
        """Checks if a token network is followed by the pathfinding service."""
        return token_network_address in self.token_networks.keys()

    def get_token_network(
            self, token_network_address: TokenNetworkAddress
    ) -> Optional[TokenNetwork]:
        """Returns the `TokenNetwork` for the given address or `None` for unknown networks."""
        return self.token_networks.get(token_network_address)

    def handle_event(self, event: Event) -> None:
        with sentry_sdk.configure_scope() as scope:
            with metrics.collect_event_metrics(event):
                scope.set_extra("event", event)
                if isinstance(event, ReceiveTokenNetworkCreatedEvent):
                    self.handle_token_network_created(event)
                elif isinstance(event, ReceiveChannelOpenedEvent):
                    self.handle_channel_opened(event)
                elif isinstance(event, ReceiveChannelClosedEvent):
                    self.handle_channel_closed(event)
                elif isinstance(event, UpdatedHeadBlockEvent):
                    # TODO: Store blockhash here as well
                    self.blockchain_state.latest_committed_block = event.head_block_number
                    self.database.update_lastest_committed_block(
                        event.head_block_number)
                else:
                    log.debug("Unhandled event", evt=event)

    def handle_token_network_created(
            self, event: ReceiveTokenNetworkCreatedEvent) -> None:
        network_address = event.token_network_address
        if not self.follows_token_network(network_address):
            log.info("Found new token network", event_=event)

            self.token_networks[network_address] = TokenNetwork(
                network_address)
            self.database.upsert_token_network(network_address)

    def handle_channel_opened(self, event: ReceiveChannelOpenedEvent) -> None:
        token_network = self.get_token_network(event.token_network_address)
        if token_network is None:
            return

        log.info("Received ChannelOpened event", event_=event)

        channel = token_network.handle_channel_opened_event(
            channel_identifier=event.channel_identifier,
            participant1=event.participant1,
            participant2=event.participant2,
            settle_timeout=event.settle_timeout,
        )
        self.database.upsert_channel(channel)

        # Handle messages for this channel which were received before ChannelOpened
        with self.database.conn:
            for message in self.database.pop_waiting_messages(
                    token_network_address=token_network.address,
                    channel_id=event.channel_identifier):
                log.debug("Processing deferred message", message=message)
                self.handle_message(message)

    def handle_channel_closed(self, event: ReceiveChannelClosedEvent) -> None:
        token_network = self.get_token_network(event.token_network_address)
        if token_network is None:
            return

        log.info("Received ChannelClosed event", event_=event)

        channel_deleted = self.database.delete_channel(
            event.token_network_address, event.channel_identifier)
        if channel_deleted:
            token_network.handle_channel_closed_event(event.channel_identifier)
        else:
            log.error(
                "Received ChannelClosed event for unknown channel",
                token_network_address=event.token_network_address,
                channel_identifier=event.channel_identifier,
            )
            metrics.get_metrics_for_label(metrics.ERRORS_LOGGED,
                                          metrics.ErrorCategory.STATE).inc()

    def handle_message(self, message: Message) -> None:
        with sentry_sdk.configure_scope() as scope:
            scope.set_extra("message", message)
            try:
                with metrics.collect_message_metrics(message):
                    if isinstance(message, PFSCapacityUpdate):
                        changed_channel: Optional[
                            Channel] = self.on_capacity_update(message)
                    elif isinstance(message, PFSFeeUpdate):
                        changed_channel = self.on_fee_update(message)
                    else:
                        log.debug("Ignoring message", unknown_message=message)
                        return

                    if changed_channel:
                        self.database.upsert_channel(changed_channel)

            except DeferMessage as ex:
                self.defer_message_until_channel_is_open(ex.deferred_message)
            except InvalidGlobalMessage as ex:
                log.info(str(ex), **asdict(message))

    def defer_message_until_channel_is_open(self,
                                            message: DeferableMessage) -> None:
        log.debug(
            "Received message for unknown channel, defer until ChannelOpened is confirmed",
            channel_id=message.canonical_identifier.channel_identifier,
            message=message,
        )
        self.database.insert_waiting_message(message)

    def _validate_pfs_fee_update(self, message: PFSFeeUpdate) -> TokenNetwork:
        # check if chain_id matches
        if message.canonical_identifier.chain_identifier != self.chain_id:
            raise InvalidFeeUpdate(
                "Received Fee Update with unknown chain identifier")

        # check if token network exists
        token_network = self.get_token_network(
            message.canonical_identifier.token_network_address)
        if token_network is None:
            raise InvalidFeeUpdate(
                "Received Fee Update with unknown token network")

        # check signature of Capacity Update
        if message.sender != message.updating_participant:
            raise InvalidFeeUpdate("Fee Update not signed correctly")

        # check if channel exists
        channel_identifier = message.canonical_identifier.channel_identifier
        if channel_identifier not in token_network.channel_id_to_addresses:
            raise DeferMessage(message)

        # check if participants fit to channel id
        participants = token_network.channel_id_to_addresses[
            channel_identifier]
        if message.updating_participant not in participants:
            raise InvalidFeeUpdate(
                "Sender of Fee Update does not match the internal channel")

        # check that timestamp has no timezone
        if message.timestamp.tzinfo is not None:
            raise InvalidFeeUpdate(
                "Timestamp of Fee Update should not contain timezone")

        return token_network

    def on_fee_update(self, message: PFSFeeUpdate) -> Optional[Channel]:
        token_network = self._validate_pfs_fee_update(message)
        log.debug("Received Fee Update", message=message)

        return token_network.handle_channel_fee_update(message)

    def _validate_pfs_capacity_update(
            self, message: PFSCapacityUpdate) -> TokenNetwork:
        # check if chain_id matches
        if message.canonical_identifier.chain_identifier != self.chain_id:
            raise InvalidCapacityUpdate(
                "Received Capacity Update with unknown chain identifier")

        # check if token network exists
        token_network = self.get_token_network(
            message.canonical_identifier.token_network_address)
        if token_network is None:
            raise InvalidCapacityUpdate(
                "Received Capacity Update with unknown token network")

        # check values < max int 256
        if message.updating_capacity > UINT256_MAX:
            raise InvalidCapacityUpdate(
                "Received Capacity Update with impossible updating_capacity")
        if message.other_capacity > UINT256_MAX:
            raise InvalidCapacityUpdate(
                "Received Capacity Update with impossible other_capacity")

        # check signature of Capacity Update
        if message.sender != message.updating_participant:
            raise InvalidCapacityUpdate("Capacity Update not signed correctly")

        # check if channel exists
        channel_identifier = message.canonical_identifier.channel_identifier
        if channel_identifier not in token_network.channel_id_to_addresses:
            raise DeferMessage(message)

        # check if participants fit to channel id
        participants = token_network.channel_id_to_addresses[
            channel_identifier]
        if message.updating_participant not in participants:
            raise InvalidCapacityUpdate(
                "Sender of Capacity Update does not match the internal channel"
            )
        if message.other_participant not in participants:
            raise InvalidCapacityUpdate(
                "Other Participant of Capacity Update does not match the internal channel"
            )

        return token_network

    def on_capacity_update(self, message: PFSCapacityUpdate) -> Channel:
        token_network = self._validate_pfs_capacity_update(message)
        log.debug("Received Capacity Update", message=message)
        self.database.upsert_capacity_update(message)

        updating_capacity_partner, other_capacity_partner = self.database.get_capacity_updates(
            updating_participant=message.other_participant,
            token_network_address=TokenNetworkAddress(
                message.canonical_identifier.token_network_address),
            channel_id=message.canonical_identifier.channel_identifier,
        )
        return token_network.handle_channel_balance_update_message(
            message=message,
            updating_capacity_partner=updating_capacity_partner,
            other_capacity_partner=other_capacity_partner,
        )
Example #12
    def __init__(
            self,
            web3: Web3,
            contract_manager: ContractManager,
            registry_address: Address,
            private_key: str,
            db_filename: str,
            user_deposit_contract_address: Address,
            sync_start_block: int = 0,
            required_confirmations: int = 8,
            poll_interval: int = 10,
            service_fee: int = 0,
    ):
        """ Creates a new pathfinding service

        Args:
            contract_manager: A contract manager
            token_network_listener: A blockchain listener object
            token_network_registry_listener: A blockchain listener object for the network registry
            chain_id: The id of the chain the PFS runs on
        """
        super().__init__()

        self.web3 = web3
        self.contract_manager = contract_manager
        self.registry_address = registry_address
        self.sync_start_block = sync_start_block
        self.required_confirmations = required_confirmations
        self.poll_interval = poll_interval
        self.chain_id = int(web3.net.version)
        self.private_key = private_key
        self.address = private_key_to_address(private_key)
        self.service_fee = service_fee

        self.is_running = gevent.event.Event()
        self.token_networks: Dict[Address, TokenNetwork] = {}
        self.token_network_listeners: List[BlockchainListener] = []
        self.database = PFSDatabase(
            filename=db_filename,
            pfs_address=self.address,
        )
        self.user_deposit_contract = web3.eth.contract(
            abi=self.contract_manager.get_contract_abi(
                CONTRACT_USER_DEPOSIT,
            ),
            address=user_deposit_contract_address,
        )

        log.info(
            'Starting TokenNetworkRegistry Listener',
            required_confirmations=self.required_confirmations,
        )
        self.token_network_registry_listener = BlockchainListener(
            web3=web3,
            contract_manager=self.contract_manager,
            contract_name=CONTRACT_TOKEN_NETWORK_REGISTRY,
            contract_address=self.registry_address,
            required_confirmations=self.required_confirmations,
            poll_interval=self.poll_interval,
            sync_start_block=self.sync_start_block,
        )
        log.info(
            'Listening to token network registry',
            registry_address=registry_address,
            start_block=sync_start_block,
        )
        self._setup_token_networks()

        try:
            self.matrix_listener = MatrixListener(
                private_key=private_key,
                chain_id=self.chain_id,
                callback=self.handle_message,
                service_room_suffix=PATH_FINDING_BROADCASTING_ROOM,
            )
        except ConnectionError as e:
            log.critical(
                'Could not connect to broadcasting system.',
                exc=e,
            )
            sys.exit(1)
Example #13
class PathfindingService(gevent.Greenlet):
    def __init__(
            self,
            web3: Web3,
            contract_manager: ContractManager,
            registry_address: Address,
            private_key: str,
            db_filename: str,
            user_deposit_contract_address: Address,
            sync_start_block: int = 0,
            required_confirmations: int = 8,
            poll_interval: int = 10,
            service_fee: int = 0,
    ):
        """ Creates a new pathfinding service

        Args:
            contract_manager: A contract manager
            token_network_listener: A blockchain listener object
            token_network_registry_listener: A blockchain listener object for the network registry
            chain_id: The id of the chain the PFS runs on
        """
        super().__init__()

        self.web3 = web3
        self.contract_manager = contract_manager
        self.registry_address = registry_address
        self.sync_start_block = sync_start_block
        self.required_confirmations = required_confirmations
        self.poll_interval = poll_interval
        self.chain_id = int(web3.net.version)
        self.private_key = private_key
        self.address = private_key_to_address(private_key)
        self.service_fee = service_fee

        self.is_running = gevent.event.Event()
        self.token_networks: Dict[Address, TokenNetwork] = {}
        self.token_network_listeners: List[BlockchainListener] = []
        self.database = PFSDatabase(
            filename=db_filename,
            pfs_address=self.address,
        )
        self.user_deposit_contract = web3.eth.contract(
            abi=self.contract_manager.get_contract_abi(
                CONTRACT_USER_DEPOSIT,
            ),
            address=user_deposit_contract_address,
        )

        log.info(
            'Starting TokenNetworkRegistry Listener',
            required_confirmations=self.required_confirmations,
        )
        self.token_network_registry_listener = BlockchainListener(
            web3=web3,
            contract_manager=self.contract_manager,
            contract_name=CONTRACT_TOKEN_NETWORK_REGISTRY,
            contract_address=self.registry_address,
            required_confirmations=self.required_confirmations,
            poll_interval=self.poll_interval,
            sync_start_block=self.sync_start_block,
        )
        log.info(
            'Listening to token network registry',
            registry_address=registry_address,
            start_block=sync_start_block,
        )
        self._setup_token_networks()

        try:
            self.matrix_listener = MatrixListener(
                private_key=private_key,
                chain_id=self.chain_id,
                callback=self.handle_message,
                service_room_suffix=PATH_FINDING_BROADCASTING_ROOM,
            )
        except ConnectionError as e:
            log.critical(
                'Could not connect to broadcasting system.',
                exc=e,
            )
            sys.exit(1)

    def _setup_token_networks(self):
        self.token_network_registry_listener.add_confirmed_listener(
            create_registry_event_topics(self.contract_manager),
            self.handle_token_network_created,
        )

    def _run(self):
        register_error_handler(error_handler)
        self.matrix_listener.start()
        self.token_network_registry_listener.start()
        self.is_running.wait()

    def stop(self):
        self.token_network_registry_listener.stop()
        for task in self.token_network_listeners:
            task.stop()
        self.matrix_listener.stop()
        self.is_running.set()
        self.matrix_listener.join()

    def follows_token_network(self, token_network_address: Address) -> bool:
        """ Checks if a token network is followed by the pathfinding service. """
        assert is_checksum_address(token_network_address)

        return token_network_address in self.token_networks.keys()

    def _get_token_network(self, token_network_address: Address) -> Optional[TokenNetwork]:
        """ Returns the `TokenNetwork` for the given address or `None` for unknown networks. """

        assert is_checksum_address(token_network_address)
        if not self.follows_token_network(token_network_address):
            return None
        else:
            return self.token_networks[token_network_address]

    def handle_channel_event(self, event: Dict):
        event_name = event['event']

        if event_name == ChannelEvent.OPENED:
            self.handle_channel_opened(event)
        elif event_name == ChannelEvent.DEPOSIT:
            self.handle_channel_new_deposit(event)
        elif event_name == ChannelEvent.CLOSED:
            self.handle_channel_closed(event)
        else:
            log.debug('Unhandled event', evt=event)

    def handle_channel_opened(self, event: Dict):
        token_network = self._get_token_network(event['address'])

        if token_network is None:
            return

        channel_identifier = event['args']['channel_identifier']
        participant1 = event['args']['participant1']
        participant2 = event['args']['participant2']
        settle_timeout = event['args']['settle_timeout']

        log.info(
            'Received ChannelOpened event',
            token_network_address=token_network.address,
            channel_identifier=channel_identifier,
            participant1=participant1,
            participant2=participant2,
            settle_timeout=settle_timeout,
        )

        token_network.handle_channel_opened_event(
            channel_identifier,
            participant1,
            participant2,
            settle_timeout,
        )

    def handle_channel_new_deposit(self, event: Dict):
        token_network = self._get_token_network(event['address'])

        if token_network is None:
            return

        channel_identifier = event['args']['channel_identifier']
        participant_address = event['args']['participant']
        total_deposit = event['args']['total_deposit']

        log.info(
            'Received ChannelNewDeposit event',
            token_network_address=token_network.address,
            channel_identifier=channel_identifier,
            participant=participant_address,
            total_deposit=total_deposit,
        )

        token_network.handle_channel_new_deposit_event(
            channel_identifier,
            participant_address,
            total_deposit,
        )

    def handle_channel_closed(self, event: Dict):
        token_network = self._get_token_network(event['address'])

        if token_network is None:
            return

        channel_identifier = event['args']['channel_identifier']

        log.info(
            'Received ChannelClosed event',
            token_network_address=token_network.address,
            channel_identifier=channel_identifier,
        )

        token_network.handle_channel_closed_event(channel_identifier)

    def handle_token_network_created(self, event):
        token_network_address = event['args']['token_network_address']
        token_address = event['args']['token_address']
        event_block_number = event['blockNumber']

        assert is_checksum_address(token_network_address)
        assert is_checksum_address(token_address)

        if not self.follows_token_network(token_network_address):
            log.info(
                'Found new token network',
                token_address=token_address,
                token_network_address=token_network_address,
            )
            self.create_token_network_for_address(
                token_network_address,
                token_address,
                event_block_number,
            )

    def create_token_network_for_address(
        self,
        token_network_address: Address,
        token_address: Address,
        block_number: int = 0,
    ):
        token_network = TokenNetwork(token_network_address, token_address)
        self.token_networks[token_network_address] = token_network

        log.debug('Creating token network model', token_network_address=token_network_address)
        token_network_listener = BlockchainListener(
            web3=self.web3,
            contract_manager=self.contract_manager,
            contract_address=token_network_address,
            contract_name=CONTRACT_TOKEN_NETWORK,
            required_confirmations=self.required_confirmations,
            poll_interval=self.poll_interval,
            sync_start_block=block_number,
        )

        # subscribe to event notifications from blockchain listener
        token_network_listener.add_confirmed_listener(
            create_channel_event_topics(),
            self.handle_channel_event,
        )
        token_network_listener.start()
        self.token_network_listeners.append(token_network_listener)

    def handle_message(self, message: SignedMessage):
        if isinstance(message, UpdatePFS):
            try:
                self.on_pfs_update(message)
            except InvalidCapacityUpdate as x:
                log.info(
                    str(x),
                    chain_id=message.canonical_identifier.chain_identifier,
                    token_network_address=message.canonical_identifier.token_network_address,
                    channel_identifier=message.canonical_identifier.channel_identifier,
                    updating_capacity=message.updating_capacity,
                    other_capacity=message.other_capacity,
                )
        else:
            log.info('Ignoring unknown message type')

    def on_pfs_update(self, message: UpdatePFS):
        token_network_address = to_checksum_address(
            message.canonical_identifier.token_network_address,
        )
        log.info(
            'Received Capacity Update',
            token_network_address=token_network_address,
            channel_identifier=message.canonical_identifier.channel_identifier,
        )

        assert is_checksum_address(message.updating_participant)
        assert is_checksum_address(message.other_participant)

        # check if chain_id matches
        if message.canonical_identifier.chain_identifier != self.chain_id:
            raise InvalidCapacityUpdate('Received Capacity Update with unknown chain identifier')

        # check if token network exists
        token_network = self._get_token_network(token_network_address)
        if token_network is None:
            raise InvalidCapacityUpdate('Received Capacity Update with unknown token network')

        # check if channel exists
        channel_identifier = message.canonical_identifier.channel_identifier
        if channel_identifier not in token_network.channel_id_to_addresses:
            raise InvalidCapacityUpdate(
                'Received Capacity Update with unknown channel identifier in token network',
            )

        # TODO: check signature of message

        # check values < max int 256
        if message.updating_capacity > UINT256_MAX:
            raise InvalidCapacityUpdate(
                'Received Capacity Update with impossible updating_capacity',
            )
        if message.other_capacity > UINT256_MAX:
            raise InvalidCapacityUpdate(
                'Received Capacity Update with impossible other_capacity',
            )

        # check if participants fit to channel id
        participants = token_network.channel_id_to_addresses[channel_identifier]
        if message.updating_participant not in participants:
            raise InvalidCapacityUpdate(
                'Sender of Capacity Update does not match the internal channel',
            )
        if message.other_participant not in participants:
            raise InvalidCapacityUpdate(
                'Other Participant of Capacity Update does not match the internal channel',
            )

        # check if nonce is higher than current nonce
        view_to_partner, view_from_partner = token_network.get_channel_views_for_partner(
            channel_identifier=channel_identifier,
            updating_participant=message.updating_participant,
            other_participant=message.other_participant,
        )

        valid_nonces = (
            message.updating_nonce <= view_to_partner.update_nonce and
            message.other_nonce <= view_from_partner.update_nonce
        )
        if valid_nonces:
            raise InvalidCapacityUpdate('Capacity Update already received')

        token_network.handle_channel_balance_update_message(
            channel_identifier=message.canonical_identifier.channel_identifier,
            updating_participant=message.updating_participant,
            other_participant=message.other_participant,
            updating_nonce=message.updating_nonce,
            other_nonce=message.other_nonce,
            updating_capacity=message.updating_capacity,
            other_capacity=message.other_capacity,
            reveal_timeout=message.reveal_timeout,
        )
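
A quick aside on the nonce check above: a Capacity Update is applied only when at least one of its nonces is strictly newer than the nonces already stored for the channel; otherwise it is rejected as already received. The stand-alone sketch below restates that rule. The function and argument names are illustrative only and are not part of the service code.

def is_stale_capacity_update(
    updating_nonce: int,
    other_nonce: int,
    stored_updating_nonce: int,
    stored_other_nonce: int,
) -> bool:
    """Return True when both nonces have already been seen for this channel."""
    return (
        updating_nonce <= stored_updating_nonce
        and other_nonce <= stored_other_nonce
    )

assert is_stale_capacity_update(1, 0, 1, 0)      # replayed update -> rejected
assert not is_stale_capacity_update(2, 0, 1, 0)  # one side advanced -> accepted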
Example #14
class PathfindingService(gevent.Greenlet):
    def __init__(
        self,
        web3: Web3,
        contracts: Dict[str, Contract],
        private_key: str,
        db_filename: str,
        sync_start_block: BlockNumber = BlockNumber(0),
        required_confirmations: int = 8,
        poll_interval: float = 10,
        service_fee: int = 0,
    ):
        super().__init__()

        self.web3 = web3
        self.registry_address = contracts[CONTRACT_TOKEN_NETWORK_REGISTRY].address
        self.sync_start_block = sync_start_block
        self.required_confirmations = required_confirmations
        self.poll_interval = poll_interval
        self.chain_id = ChainID(int(web3.net.version))
        self.private_key = private_key
        self.address = private_key_to_address(private_key)
        self.service_fee = service_fee

        self.is_running = gevent.event.Event()
        self.token_networks: Dict[TokenNetworkAddress, TokenNetwork] = {}
        self.database = PFSDatabase(filename=db_filename, pfs_address=self.address)
        self.user_deposit_contract = contracts[CONTRACT_USER_DEPOSIT]

        self.last_known_block = 0
        self.blockchain_state = BlockchainState(
            chain_id=self.chain_id,
            token_network_registry_address=self.registry_address,
            monitor_contract_address=Address(''),  # FIXME
            latest_known_block=self.sync_start_block,
            token_network_addresses=[],
        )
        log.info(
            'Listening to token network registry',
            registry_address=self.registry_address,
            start_block=sync_start_block,
        )

        try:
            self.matrix_listener = MatrixListener(
                private_key=private_key,
                chain_id=self.chain_id,
                callback=self.handle_message,
                service_room_suffix=PATH_FINDING_BROADCASTING_ROOM,
            )
        except ConnectionError as e:
            log.critical('Could not connect to broadcasting system.', exc=e)
            sys.exit(1)

    def _run(self) -> None:  # pylint: disable=method-hidden
        register_error_handler(error_handler)
        self.matrix_listener.start()
        while not self.is_running.is_set():
            last_confirmed_block = self.web3.eth.blockNumber - self.required_confirmations

            max_query_interval_end_block = (
                self.blockchain_state.latest_known_block + MAX_FILTER_INTERVAL
            )
            # Limit the maximum number of blocks processed per iteration
            last_block = min(last_confirmed_block, max_query_interval_end_block)

            self._process_new_blocks(last_block)

            try:
                gevent.sleep(self.poll_interval)
            except KeyboardInterrupt:
                log.info('Shutting down')
                sys.exit(0)

    def _process_new_blocks(self, last_block: BlockNumber) -> None:
        self.last_known_block = last_block

        # The BCL returns a new state and events related to the channel lifecycle
        new_chain_state, events = get_blockchain_events(
            web3=self.web3,
            contract_manager=CONTRACT_MANAGER,
            chain_state=self.blockchain_state,
            to_block=last_block,
            query_ms=False,
        )

        # If a new token network was found, we need to write it to the DB, otherwise
        # the constraints for new channels cannot be satisfied. Only update the
        # network addresses here; everything else is done later.
        token_networks_changed = (
            self.blockchain_state.token_network_addresses
            != new_chain_state.token_network_addresses
        )
        if token_networks_changed:
            self.blockchain_state.token_network_addresses = new_chain_state.token_network_addresses
        #     self.context.db.update_state(self.context.ms_state)

        # Now set the updated chain state to the context, will be stored later
        self.blockchain_state = new_chain_state
        for event in events:
            self.handle_channel_event(event)

        self.blockchain_state.latest_known_block = last_block

    def stop(self) -> None:
        self.matrix_listener.stop()
        self.is_running.set()
        self.matrix_listener.join()

    def follows_token_network(self, token_network_address: TokenNetworkAddress) -> bool:
        """ Checks if a token network is followed by the pathfinding service. """
        return token_network_address in self.token_networks.keys()

    def get_token_network(
        self, token_network_address: TokenNetworkAddress
    ) -> Optional[TokenNetwork]:
        """ Returns the `TokenNetwork` for the given address or `None` for unknown networks. """
        return self.token_networks.get(token_network_address)

    def handle_channel_event(self, event: Event) -> None:
        if isinstance(event, ReceiveTokenNetworkCreatedEvent):
            self.handle_token_network_created(event)
        elif isinstance(event, ReceiveChannelOpenedEvent):
            self.handle_channel_opened(event)
        elif isinstance(event, ReceiveChannelNewDepositEvent):
            self.handle_channel_new_deposit(event)
        elif isinstance(event, ReceiveChannelClosedEvent):
            self.handle_channel_closed(event)
        else:
            log.debug('Unhandled event', evt=event)

    def handle_token_network_created(self, event: ReceiveTokenNetworkCreatedEvent) -> None:
        network_address = TokenNetworkAddress(event.token_network_address)
        if not self.follows_token_network(network_address):
            log.info('Found new token network', **asdict(event))

            self.token_networks[network_address] = TokenNetwork(network_address)

    def handle_channel_opened(self, event: ReceiveChannelOpenedEvent) -> None:
        token_network = self.get_token_network(event.token_network_address)
        if token_network is None:
            return

        log.info('Received ChannelOpened event', **asdict(event))

        token_network.handle_channel_opened_event(
            channel_identifier=event.channel_identifier,
            participant1=event.participant1,
            participant2=event.participant2,
            settle_timeout=event.settle_timeout,
        )

    def handle_channel_new_deposit(self, event: ReceiveChannelNewDepositEvent) -> None:
        token_network = self.get_token_network(event.token_network_address)
        if token_network is None:
            return

        log.info('Received ChannelNewDeposit event', **asdict(event))

        token_network.handle_channel_new_deposit_event(
            channel_identifier=event.channel_identifier,
            receiver=event.participant_address,
            total_deposit=event.total_deposit,
        )

    def handle_channel_closed(self, event: ReceiveChannelClosedEvent) -> None:
        token_network = self.get_token_network(event.token_network_address)
        if token_network is None:
            return

        log.info('Received ChannelClosed event', **asdict(event))

        token_network.handle_channel_closed_event(channel_identifier=event.channel_identifier)

    def handle_message(self, message: SignedMessage) -> None:
        if isinstance(message, UpdatePFS):
            try:
                self.on_pfs_update(message)
            except InvalidCapacityUpdate as x:
                log.info(str(x), **message.to_dict())
        else:
            log.info('Ignoring unknown message type')

    def on_pfs_update(self, message: UpdatePFS) -> None:
        token_network_address = to_checksum_address(
            message.canonical_identifier.token_network_address
        )

        updating_participant = to_checksum_address(message.updating_participant)
        other_participant = to_checksum_address(message.other_participant)

        # check if chain_id matches
        if message.canonical_identifier.chain_identifier != self.chain_id:
            raise InvalidCapacityUpdate('Received Capacity Update with unknown chain identifier')

        # check if token network exists
        token_network = self.get_token_network(token_network_address)
        if token_network is None:
            raise InvalidCapacityUpdate('Received Capacity Update with unknown token network')

        # check if channel exists
        channel_identifier = message.canonical_identifier.channel_identifier
        if channel_identifier not in token_network.channel_id_to_addresses:
            raise InvalidCapacityUpdate(
                'Received Capacity Update with unknown channel identifier in token network'
            )

        # check values < max int 256
        if message.updating_capacity > UINT256_MAX:
            raise InvalidCapacityUpdate(
                'Received Capacity Update with impossible updating_capacity'
            )
        if message.other_capacity > UINT256_MAX:
            raise InvalidCapacityUpdate('Received Capacity Update with impossible other_capacity')

        # check if participants fit to channel id
        participants = token_network.channel_id_to_addresses[channel_identifier]
        if updating_participant not in participants:
            raise InvalidCapacityUpdate(
                'Sender of Capacity Update does not match the internal channel'
            )
        if other_participant not in participants:
            raise InvalidCapacityUpdate(
                'Other Participant of Capacity Update does not match the internal channel'
            )

        # check signature of Capacity Update
        signer = recover_signer_from_capacity_update(message)
        if signer != updating_participant:
            raise InvalidCapacityUpdate('Capacity Update not signed correctly')

        # check if nonce is higher than current nonce
        view_to_partner, view_from_partner = token_network.get_channel_views_for_partner(
            channel_identifier=channel_identifier,
            updating_participant=updating_participant,
            other_participant=other_participant,
        )

        is_nonce_pair_known = (
            message.updating_nonce <= view_to_partner.update_nonce
            and message.other_nonce <= view_from_partner.update_nonce
        )
        if is_nonce_pair_known:
            raise InvalidCapacityUpdate('Capacity Update already received')

        log.info('Received Capacity Update', **message.to_dict())

        token_network.handle_channel_balance_update_message(
            channel_identifier=message.canonical_identifier.channel_identifier,
            updating_participant=updating_participant,
            other_participant=other_participant,
            updating_nonce=message.updating_nonce,
            other_nonce=message.other_nonce,
            updating_capacity=message.updating_capacity,
            other_capacity=message.other_capacity,
            reveal_timeout=message.reveal_timeout,
            mediation_fee=message.mediation_fee,
        )
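
Example #14 adds a signature check: the signer recovered from the Capacity Update must be the updating participant. The snippet below shows only the generic recover-and-compare pattern using eth_account; the payload, the throw-away key, and the variable names are assumptions for illustration, since the real service recovers the signer from Raiden's own packed message data.

from eth_account import Account
from eth_account.messages import encode_defunct
from eth_utils import is_same_address

private_key = "0x" + "11" * 32                    # throw-away key, sketch only
payload = encode_defunct(text="capacity-update-payload")
signature = Account.sign_message(payload, private_key).signature

recovered_signer = Account.recover_message(payload, signature=signature)
updating_participant = Account.from_key(private_key).address

assert is_same_address(recovered_signer, updating_participant)  # accepted
assert not is_same_address(recovered_signer, "0x" + "00" * 20)  # would be rejected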
Example #15
class PathfindingService(gevent.Greenlet):
    # pylint: disable=too-many-instance-attributes
    def __init__(  # pylint: disable=too-many-arguments
        self,
        web3: Web3,
        contracts: Dict[str, Contract],
        private_key: str,
        db_filename: str,
        sync_start_block: BlockNumber = BlockNumber(0),
        required_confirmations: int = 8,
        poll_interval: float = 10,
    ):
        super().__init__()

        self.web3 = web3
        self.registry_address = contracts[CONTRACT_TOKEN_NETWORK_REGISTRY].address
        self.user_deposit_contract = contracts[CONTRACT_USER_DEPOSIT]
        self.chain_id = ChainID(int(web3.net.version))
        self.address = private_key_to_address(private_key)
        self._required_confirmations = required_confirmations
        self._poll_interval = poll_interval
        self._is_running = gevent.event.Event()

        self.database = PFSDatabase(
            filename=db_filename,
            pfs_address=self.address,
            sync_start_block=sync_start_block,
            token_network_registry_address=self.registry_address,
            chain_id=self.chain_id,
            user_deposit_contract_address=self.user_deposit_contract.address,
            allow_create=True,
        )
        self.token_networks = self._load_token_networks()

        try:
            self.matrix_listener = MatrixListener(
                private_key=private_key,
                chain_id=self.chain_id,
                callback=self.handle_message,
                service_room_suffix=PATH_FINDING_BROADCASTING_ROOM,
            )
        except ConnectionError as exc:
            log.critical("Could not connect to broadcasting system.", exc=exc)
            sys.exit(1)

    def _load_token_networks(self) -> Dict[TokenNetworkAddress, TokenNetwork]:
        network_for_address = {n.address: n for n in self.database.get_token_networks()}
        channel_views = self.database.get_channel_views()
        for cv in channel_views:
            network_for_address[cv.token_network_address].add_channel_view(cv)
        return network_for_address

    def _run(self) -> None:  # pylint: disable=method-hidden
        register_error_handler()
        self.matrix_listener.start()

        log.info(
            "Listening to token network registry",
            registry_address=self.registry_address,
            start_block=self.database.get_latest_known_block(),
        )
        while not self._is_running.is_set():
            last_confirmed_block = self.web3.eth.blockNumber - self._required_confirmations

            max_query_interval_end_block = (
                self.database.get_latest_known_block() + MAX_FILTER_INTERVAL
            )
            # Limit the maximum number of blocks processed per iteration
            last_block = min(last_confirmed_block, max_query_interval_end_block)

            self._process_new_blocks(last_block)

            try:
                gevent.sleep(self._poll_interval)
            except KeyboardInterrupt:
                log.info("Shutting down")
                sys.exit(0)

    def _process_new_blocks(self, last_block: BlockNumber) -> None:
        _, events = get_blockchain_events(
            web3=self.web3,
            contract_manager=CONTRACT_MANAGER,
            chain_state=BlockchainState(
                latest_known_block=self.database.get_latest_known_block(),
                token_network_addresses=list(self.token_networks.keys()),
                token_network_registry_address=self.registry_address,
                monitor_contract_address=Address(""),  # FIXME
                chain_id=self.chain_id,
            ),
            to_block=last_block,
            query_ms=False,
        )
        for event in events:
            self.handle_event(event)

    def stop(self) -> None:
        self.matrix_listener.stop()
        self._is_running.set()
        self.matrix_listener.join()

    def follows_token_network(self, token_network_address: TokenNetworkAddress) -> bool:
        """ Checks if a token network is followed by the pathfinding service. """
        return token_network_address in self.token_networks.keys()

    def get_token_network(
        self, token_network_address: TokenNetworkAddress
    ) -> Optional[TokenNetwork]:
        """ Returns the `TokenNetwork` for the given address or `None` for unknown networks. """
        return self.token_networks.get(token_network_address)

    def handle_event(self, event: Event) -> None:
        if isinstance(event, ReceiveTokenNetworkCreatedEvent):
            self.handle_token_network_created(event)
        elif isinstance(event, ReceiveChannelOpenedEvent):
            self.handle_channel_opened(event)
        elif isinstance(event, ReceiveChannelNewDepositEvent):
            self.handle_channel_new_deposit(event)
        elif isinstance(event, ReceiveChannelClosedEvent):
            self.handle_channel_closed(event)
        elif isinstance(event, UpdatedHeadBlockEvent):
            self.database.update_lastest_known_block(event.head_block_number)
        else:
            log.debug("Unhandled event", evt=event)

    def handle_token_network_created(self, event: ReceiveTokenNetworkCreatedEvent) -> None:
        network_address = TokenNetworkAddress(event.token_network_address)
        if not self.follows_token_network(network_address):
            log.info("Found new token network", **asdict(event))

            self.token_networks[network_address] = TokenNetwork(network_address)
            self.database.upsert_token_network(network_address)

    def handle_channel_opened(self, event: ReceiveChannelOpenedEvent) -> None:
        token_network = self.get_token_network(event.token_network_address)
        if token_network is None:
            return

        log.info("Received ChannelOpened event", **asdict(event))

        channel_views = token_network.handle_channel_opened_event(
            channel_identifier=event.channel_identifier,
            participant1=event.participant1,
            participant2=event.participant2,
            settle_timeout=event.settle_timeout,
        )
        for cv in channel_views:
            self.database.upsert_channel_view(cv)

    def handle_channel_new_deposit(self, event: ReceiveChannelNewDepositEvent) -> None:
        token_network = self.get_token_network(event.token_network_address)
        if token_network is None:
            return

        log.info("Received ChannelNewDeposit event", **asdict(event))

        channel_view = token_network.handle_channel_new_deposit_event(
            channel_identifier=event.channel_identifier,
            receiver=event.participant_address,
            total_deposit=event.total_deposit,
        )
        if channel_view:
            self.database.upsert_channel_view(channel_view)

    def handle_channel_closed(self, event: ReceiveChannelClosedEvent) -> None:
        token_network = self.get_token_network(event.token_network_address)
        if token_network is None:
            return

        log.info("Received ChannelClosed event", **asdict(event))

        token_network.handle_channel_closed_event(channel_identifier=event.channel_identifier)
        self.database.delete_channel_views(event.channel_identifier)

    def handle_message(self, message: SignedMessage) -> None:
        if isinstance(message, UpdatePFS):
            try:
                self.on_pfs_update(message)
            except InvalidCapacityUpdate as x:
                log.info(str(x), **message.to_dict())
        else:
            log.info("Ignoring unknown message type")

    def _validate_pfs_update(self, message: UpdatePFS) -> TokenNetwork:
        token_network_address = to_checksum_address(
            message.canonical_identifier.token_network_address
        )

        updating_participant = to_checksum_address(message.updating_participant)
        other_participant = to_checksum_address(message.other_participant)

        # check if chain_id matches
        if message.canonical_identifier.chain_identifier != self.chain_id:
            raise InvalidCapacityUpdate("Received Capacity Update with unknown chain identifier")

        # check if token network exists
        token_network = self.get_token_network(token_network_address)
        if token_network is None:
            raise InvalidCapacityUpdate("Received Capacity Update with unknown token network")

        # check if channel exists
        channel_identifier = message.canonical_identifier.channel_identifier
        if channel_identifier not in token_network.channel_id_to_addresses:
            raise InvalidCapacityUpdate(
                "Received Capacity Update with unknown channel identifier in token network"
            )

        # check values < max int 256
        if message.updating_capacity > UINT256_MAX:
            raise InvalidCapacityUpdate(
                "Received Capacity Update with impossible updating_capacity"
            )
        if message.other_capacity > UINT256_MAX:
            raise InvalidCapacityUpdate("Received Capacity Update with impossible other_capacity")

        # check if participants fit to channel id
        participants = token_network.channel_id_to_addresses[channel_identifier]
        if updating_participant not in participants:
            raise InvalidCapacityUpdate(
                "Sender of Capacity Update does not match the internal channel"
            )
        if other_participant not in participants:
            raise InvalidCapacityUpdate(
                "Other Participant of Capacity Update does not match the internal channel"
            )

        # check signature of Capacity Update
        signer = to_checksum_address(message.sender)  # recover address from signature
        if not is_same_address(signer, updating_participant):
            raise InvalidCapacityUpdate("Capacity Update not signed correctly")

        # check if nonce is higher than current nonce
        view_to_partner, view_from_partner = token_network.get_channel_views_for_partner(
            channel_identifier=channel_identifier,
            updating_participant=updating_participant,
            other_participant=other_participant,
        )
        is_nonce_pair_known = (
            message.updating_nonce <= view_to_partner.update_nonce
            and message.other_nonce <= view_from_partner.update_nonce
        )
        if is_nonce_pair_known:
            raise InvalidCapacityUpdate("Capacity Update already received")

        return token_network

    def on_pfs_update(self, message: UpdatePFS) -> None:
        token_network = self._validate_pfs_update(message)
        log.info("Received Capacity Update", **message.to_dict())
        token_network.handle_channel_balance_update_message(message)
Example #16
class RequestCollector(gevent.Greenlet):
    def __init__(self, private_key: str, state_db: SharedDatabase):
        super().__init__()

        self.private_key = private_key
        self.state_db = state_db

        state = self.state_db.load_state()
        self.chain_id = state.blockchain_state.chain_id
        self.matrix_listener = MatrixListener(
            private_key=private_key,
            chain_id=self.chain_id,
            service_room_suffix=MONITORING_BROADCASTING_ROOM,
            message_received_callback=self.handle_message,
        )

    def listen_forever(self) -> None:
        self.matrix_listener.listen_forever()

    def _run(self) -> None:  # pylint: disable=method-hidden
        register_error_handler()

        try:
            self.matrix_listener.start()
        except ConnectionError as exc:
            log.critical("Could not connect to broadcasting system.", exc=exc)
            sys.exit(1)

    def stop(self) -> None:
        self.matrix_listener.stop()
        self.matrix_listener.join()

    def handle_message(self, message: Message) -> None:
        if isinstance(message, RequestMonitoring):
            self.on_monitor_request(message)
        else:
            log.debug("Ignoring message", message=message)

    def on_monitor_request(self,
                           request_monitoring: RequestMonitoring) -> None:
        assert isinstance(request_monitoring, RequestMonitoring)
        assert request_monitoring.non_closing_signature is not None
        assert request_monitoring.reward_proof_signature is not None

        # Convert Raiden's RequestMonitoring object to a MonitorRequest
        try:
            monitor_request = MonitorRequest(
                channel_identifier=request_monitoring.balance_proof.channel_identifier,
                token_network_address=TokenNetworkAddress(
                    request_monitoring.balance_proof.token_network_address
                ),
                chain_id=request_monitoring.balance_proof.chain_id,
                balance_hash=encode_hex(request_monitoring.balance_proof.balance_hash),
                nonce=request_monitoring.balance_proof.nonce,
                additional_hash=encode_hex(request_monitoring.balance_proof.additional_hash),
                closing_signature=request_monitoring.balance_proof.signature,
                non_closing_signature=request_monitoring.non_closing_signature,
                reward_amount=request_monitoring.reward_amount,
                # FIXME: not sure why the Signature call is necessary
                reward_proof_signature=Signature(request_monitoring.signature),
            )
        except InvalidSignature:
            log.info("Ignore MR with invalid signature",
                     monitor_request=request_monitoring)
            return

        # Validate MR
        if monitor_request.chain_id != self.chain_id:
            log.debug("Bad chain_id",
                      monitor_request=monitor_request,
                      expected=self.chain_id)
            return

        # Check that received MR is newer by comparing nonces
        old_mr = self.state_db.get_monitor_request(
            token_network_address=monitor_request.token_network_address,
            channel_id=monitor_request.channel_identifier,
            non_closing_signer=monitor_request.non_closing_signer,
        )
        if old_mr and old_mr.nonce >= monitor_request.nonce:
            log.debug(
                "New MR does not have a newer nonce.",
                token_network_address=monitor_request.token_network_address,
                channel_identifier=monitor_request.channel_identifier,
                received_nonce=monitor_request.nonce,
                known_nonce=old_mr.nonce,
            )
            return

        log.info(
            "Received new MR",
            token_network_address=monitor_request.token_network_address,
            channel_identifier=monitor_request.channel_identifier,
            nonce=monitor_request.nonce,
            signer=monitor_request.signer,
            non_closing_signer=monitor_request.non_closing_signer,
            reward_signer=monitor_request.reward_proof_signer,
            reward_amount=monitor_request.reward_amount,
        )

        with self.state_db.conn:
            self.state_db.upsert_monitor_request(monitor_request)
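
The nonce comparison above keeps the stored Monitor Request monotone: an incoming MR replaces the stored one only if it carries a strictly higher nonce. A minimal, self-contained restatement of that rule follows; the helper name is made up for this sketch.

from typing import Optional

def should_replace_monitor_request(new_nonce: int, stored_nonce: Optional[int]) -> bool:
    """Persist the new MR only if nothing is stored yet or its nonce is higher."""
    return stored_nonce is None or new_nonce > stored_nonce

assert should_replace_monitor_request(5, None)   # first MR for this channel
assert should_replace_monitor_request(5, 4)      # newer balance proof
assert not should_replace_monitor_request(5, 5)  # replay or stale -> keep old MR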
Example #17
class RequestCollector(gevent.Greenlet):
    def __init__(self,
                 private_key: str,
                 state_db: SharedDatabase,
                 matrix_servers: List[str] = None):
        super().__init__()

        self.private_key = private_key
        self.state_db = state_db

        state = self.state_db.load_state()
        self.chain_id = state.blockchain_state.chain_id
        self.matrix_listener = MatrixListener(
            private_key=private_key,
            chain_id=self.chain_id,
            service_room_suffix=MONITORING_BROADCASTING_ROOM,
            message_received_callback=self.handle_message,
            servers=matrix_servers,
        )

    def listen_forever(self) -> None:
        self.matrix_listener.listen_forever()

    def _run(self) -> None:  # pylint: disable=method-hidden
        register_error_handler()

        try:
            self.matrix_listener.start()
            self.matrix_listener.startup_finished.wait(
                timeout=MATRIX_START_TIMEOUT)
        except (Timeout, ConnectionError) as exc:
            log.critical("Could not connect to broadcasting system.", exc=exc)
            sys.exit(1)

    def stop(self) -> None:
        self.matrix_listener.stop()
        self.matrix_listener.join()

    def handle_message(self, message: Message) -> None:
        try:
            if isinstance(message, RequestMonitoring):
                self.on_monitor_request(message)
            else:
                log.debug("Ignoring message", message=message)
        # TODO: add more advanced exception handling
        except AssertionError as ex:
            log.error("Error while handling message", message=message, _exc=ex)

    def on_monitor_request(self,
                           request_monitoring: RequestMonitoring) -> None:
        assert isinstance(request_monitoring, RequestMonitoring)
        assert request_monitoring.non_closing_signature is not None
        assert request_monitoring.reward_proof_signature is not None

        # Convert Raiden's RequestMonitoring object to a MonitorRequest
        try:
            monitor_request = MonitorRequest(
                channel_identifier=request_monitoring.balance_proof.channel_identifier,
                token_network_address=TokenNetworkAddress(
                    request_monitoring.balance_proof.token_network_address
                ),
                chain_id=request_monitoring.balance_proof.chain_id,
                balance_hash=encode_hex(request_monitoring.balance_proof.balance_hash),
                nonce=request_monitoring.balance_proof.nonce,
                additional_hash=encode_hex(request_monitoring.balance_proof.additional_hash),
                closing_signature=request_monitoring.balance_proof.signature,
                non_closing_signature=request_monitoring.non_closing_signature,
                reward_amount=request_monitoring.reward_amount,
                non_closing_participant=request_monitoring.non_closing_participant,
                reward_proof_signature=request_monitoring.signature,
                msc_address=request_monitoring.monitoring_service_contract_address,
            )
        except InvalidSignature:
            log.info("Ignore MR with invalid signature",
                     monitor_request=request_monitoring)
            return

        # Validate MR
        if monitor_request.chain_id != self.chain_id:
            log.debug("Bad chain_id",
                      monitor_request=monitor_request,
                      expected=self.chain_id)
            return
        if monitor_request.non_closing_signer != monitor_request.non_closing_participant:
            log.info("MR not signed by non_closing_participant",
                     monitor_request=monitor_request)
            return
        if monitor_request.non_closing_signer != monitor_request.reward_proof_signer:
            log.debug("The two MR signatures don't match",
                      monitor_request=monitor_request)
            return

        # Ignore MRs for channels that are already closed for a while.
        # We need to do this to prevent clients from wasting the MS' gas by
        # updating the BP after the MS has already called `monitor`, see
        # https://github.com/raiden-network/raiden-services/issues/504.
        close_age = self.state_db.channel_close_age(
            token_network_address=monitor_request.token_network_address,
            channel_id=monitor_request.channel_identifier,
        )
        # The guard only fires CHANNEL_CLOSE_MARGIN blocks after the close event is
        # already confirmed, so that should be plenty!
        if close_age is not None and close_age >= CHANNEL_CLOSE_MARGIN:
            log.warning(
                "Ignore MR for long closed channel",
                monitor_request=monitor_request,
                close_age=close_age,
            )
            return

        # Check that received MR is newer by comparing nonces
        old_mr = self.state_db.get_monitor_request(
            token_network_address=monitor_request.token_network_address,
            channel_id=monitor_request.channel_identifier,
            non_closing_signer=monitor_request.non_closing_signer,
        )
        if old_mr and old_mr.nonce >= monitor_request.nonce:
            log.debug(
                "New MR does not have a newer nonce.",
                token_network_address=monitor_request.token_network_address,
                channel_identifier=monitor_request.channel_identifier,
                received_nonce=monitor_request.nonce,
                known_nonce=old_mr.nonce,
            )
            return

        log.info(
            "Received new MR",
            token_network_address=monitor_request.token_network_address,
            channel_identifier=monitor_request.channel_identifier,
            nonce=monitor_request.nonce,
            signer=monitor_request.signer,
            non_closing_signer=monitor_request.non_closing_signer,
            reward_signer=monitor_request.reward_proof_signer,
            reward_amount=monitor_request.reward_amount,
        )

        self.state_db.upsert_monitor_request(monitor_request)
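
The close-age guard above drops Monitor Requests for channels that have already been closed for at least CHANNEL_CLOSE_MARGIN blocks, so clients cannot make the monitoring service waste gas by updating the balance proof after `monitor` was already called (see the linked issue). A hedged sketch of that guard follows; the constant value is an assumption for the sketch, not the value used by the service.

from typing import Optional

CHANNEL_CLOSE_MARGIN = 10  # assumed value, for illustration only

def ignore_for_closed_channel(close_age: Optional[int]) -> bool:
    """close_age is None while the channel is still open."""
    return close_age is not None and close_age >= CHANNEL_CLOSE_MARGIN

assert not ignore_for_closed_channel(None)  # channel still open
assert not ignore_for_closed_channel(3)     # recently closed -> MR still accepted
assert ignore_for_closed_channel(42)        # long closed -> MR ignored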
Example #18
class RequestCollector(gevent.Greenlet):
    def __init__(
        self,
        private_key: str,
        state_db: SharedDatabase,
    ):
        super().__init__()

        self.private_key = private_key
        self.state_db = state_db

        state = self.state_db.load_state()
        try:
            self.matrix_listener = MatrixListener(
                private_key=private_key,
                chain_id=state.blockchain_state.chain_id,
                callback=self.handle_message,
                service_room_suffix=MONITORING_BROADCASTING_ROOM,
            )
        except ConnectionError as e:
            log.critical(
                'Could not connect to broadcasting system.',
                exc=e,
            )
            sys.exit(1)

    def listen_forever(self):
        self.matrix_listener.listen_forever()

    def _run(self):
        register_error_handler(error_handler)
        self.matrix_listener.start()

    def stop(self):
        self.matrix_listener.stop()
        self.matrix_listener.join()

    def handle_message(self, message: SignedMessage):
        if isinstance(message, RequestMonitoring):
            self.on_monitor_request(message)
        else:
            log.info('Ignoring unknown message type')

    def on_monitor_request(
        self,
        request_monitoring: RequestMonitoring,
    ):
        assert isinstance(request_monitoring, RequestMonitoring)

        # Convert Raiden's RequestMonitoring object to a MonitorRequest
        try:
            monitor_request = MonitorRequest(
                channel_identifier=request_monitoring.balance_proof.channel_identifier,
                token_network_address=to_checksum_address(
                    request_monitoring.balance_proof.token_network_address,
                ),
                chain_id=request_monitoring.balance_proof.chain_id,
                balance_hash=encode_hex(request_monitoring.balance_proof.balance_hash),
                nonce=request_monitoring.balance_proof.nonce,
                additional_hash=encode_hex(request_monitoring.balance_proof.additional_hash),
                closing_signature=encode_hex(request_monitoring.balance_proof.signature),
                non_closing_signature=encode_hex(request_monitoring.non_closing_signature),
                reward_amount=request_monitoring.reward_amount,
                reward_proof_signature=encode_hex(request_monitoring.signature),
            )
        except InvalidSignature:
            log.info(
                'Ignore MR with invalid signature',
                monitor_request=request_monitoring,
            )
            return

        # Check that received MR is newer by comparing nonces
        old_mr = self.state_db.get_monitor_request(
            token_network_address=monitor_request.token_network_address,
            channel_id=monitor_request.channel_identifier,
            non_closing_signer=monitor_request.non_closing_signer,
        )
        if old_mr and old_mr.nonce >= monitor_request.nonce:
            log.debug(
                'New MR does not have a newer nonce.',
                token_network_address=monitor_request.token_network_address,
                channel_identifier=monitor_request.channel_identifier,
                received_nonce=monitor_request.nonce,
                known_nonce=old_mr.nonce,
            )
            return

        log.info(
            'Received new MR',
            token_network_address=monitor_request.token_network_address,
            channel_identifier=monitor_request.channel_identifier,
            nonce=monitor_request.nonce,
            signer=monitor_request.signer,
            non_closing_signer=monitor_request.non_closing_signer,
            reward_signer=monitor_request.reward_proof_signer,
            reward_amount=monitor_request.reward_amount,
        )

        with self.state_db.conn:
            self.state_db.upsert_monitor_request(monitor_request)