Example #1
    def __init__(  # pylint: disable=too-many-arguments
        self,
        web3: Web3,
        contracts: Dict[str, Contract],
        private_key: PrivateKey,
        db_filename: str,
        sync_start_block: BlockNumber,
        required_confirmations: BlockTimeout,
        poll_interval: float,
        matrix_servers: Optional[List[str]] = None,
    ):
        super().__init__()

        self.web3 = web3
        self.registry_address = contracts[CONTRACT_TOKEN_NETWORK_REGISTRY].address
        self.user_deposit_contract = contracts[CONTRACT_USER_DEPOSIT]
        self.service_token_address = self.user_deposit_contract.functions.token().call()
        self.chain_id = ChainID(web3.eth.chainId)
        self.address = private_key_to_address(private_key)
        self.required_confirmations = required_confirmations
        self._poll_interval = poll_interval
        self._is_running = gevent.event.Event()

        log.info("PFS payment address", address=self.address)

        self.database = PFSDatabase(
            filename=db_filename,
            pfs_address=self.address,
            sync_start_block=sync_start_block,
            token_network_registry_address=to_canonical_address(
                self.registry_address),
            chain_id=self.chain_id,
            user_deposit_contract_address=to_canonical_address(
                self.user_deposit_contract.address),
            allow_create=True,
        )

        self.blockchain_state = BlockchainState(
            latest_committed_block=self.database.get_latest_committed_block(),
            token_network_registry_address=to_canonical_address(
                self.registry_address),
            chain_id=self.chain_id,
        )

        self.matrix_listener = MatrixListener(
            private_key=private_key,
            chain_id=self.chain_id,
            device_id=DeviceIDs.PFS,
            message_received_callback=self.handle_message,
            servers=matrix_servers,
        )

        self.token_networks = self._load_token_networks()
        self.updated = gevent.event.Event()  # set whenever blocks are processed
        self.startup_finished = gevent.event.AsyncResult()

        self._init_metrics()
Example #2
    def __init__(  # pylint: disable=too-many-arguments
        self,
        web3: Web3,
        contracts: Dict[str, Contract],
        private_key: str,
        db_filename: str,
        sync_start_block: BlockNumber = BlockNumber(0),
        required_confirmations: int = 8,
        poll_interval: float = 10,
        matrix_servers: List[str] = None,
    ):
        super().__init__()

        self.web3 = web3
        self.registry_address = contracts[CONTRACT_TOKEN_NETWORK_REGISTRY].address
        self.user_deposit_contract = contracts[CONTRACT_USER_DEPOSIT]
        self.chain_id = ChainID(int(web3.net.version))
        self.address = private_key_to_address(private_key)
        self.required_confirmations = required_confirmations
        self._poll_interval = poll_interval
        self._is_running = gevent.event.Event()

        log.info("PFS payment address", address=self.address)

        self.blockchain_state = BlockchainState(
            latest_commited_block=BlockNumber(0),
            token_network_registry_address=self.registry_address,
            chain_id=self.chain_id,
        )

        self.database = PFSDatabase(
            filename=db_filename,
            pfs_address=self.address,
            sync_start_block=sync_start_block,
            token_network_registry_address=self.registry_address,
            chain_id=self.chain_id,
            user_deposit_contract_address=self.user_deposit_contract.address,
            allow_create=True,
        )

        self.matrix_listener = MatrixListener(
            private_key=private_key,
            chain_id=self.chain_id,
            service_room_suffix=PATH_FINDING_BROADCASTING_ROOM,
            message_received_callback=self.handle_message,
            address_reachability_changed_callback=self.handle_reachability_change,
            servers=matrix_servers,
        )

        self.address_to_reachability: Dict[Address, AddressReachability] = dict()
        self.token_networks = self._load_token_networks()
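
Note that the newer snippets (Examples #1 and #11) read the chain id via web3.eth.chainId, while the older ones parse web3.net.version. The two are not interchangeable in general: net.version returns the network id as a string, which can differ from the EIP-155 chain id on some networks. A minimal check against a locally running node (the endpoint URL is a placeholder):

from web3 import Web3

w3 = Web3(Web3.HTTPProvider("http://localhost:8545"))  # placeholder endpoint
print(w3.eth.chain_id)  # EIP-155 chain id (spelled eth.chainId in older web3.py releases)
print(w3.net.version)   # network id, returned as a string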
Example #3
def main(
    private_key: str,
    state_db: str,
    web3: Web3,
    contracts: Dict[str, Contract],
    start_block: BlockNumber,
    rdn_per_eth: float,
    expires_within: BlockNumber,
) -> None:
    pfs_address = private_key_to_address(private_key)
    chain_id = ChainID(web3.eth.chainId)
    database = PFSDatabase(filename=state_db,
                           chain_id=chain_id,
                           pfs_address=pfs_address,
                           sync_start_block=start_block)

    claim_cost_rdn = calc_claim_cost_rdn(web3, rdn_per_eth)
    ious = list(
        get_claimable_ious(
            database,
            expires_after=web3.eth.blockNumber,
            expires_before=BlockNumber(web3.eth.blockNumber + expires_within),
            claim_cost_rdn=claim_cost_rdn,
        ))
    print(f"Found {len(ious)} claimable IOUs")
    _, failures = claim_ious(ious, claim_cost_rdn,
                             contracts[CONTRACT_ONE_TO_N], web3, database)
    if failures:
        sys.exit(1)
Example #4
def main(
    private_key: str,
    state_db: str,
    web3: Web3,
    contracts: Dict[str, Contract],
    start_block: BlockNumber,
    rdn_per_eth: float,
    expires_within: BlockNumber,
) -> None:
    pfs_address = private_key_to_address(private_key)
    chain_id = ChainID(int(web3.net.version))
    database = PFSDatabase(filename=state_db,
                           chain_id=chain_id,
                           pfs_address=pfs_address)

    one_to_n_contract = contracts[CONTRACT_ONE_TO_N]

    claim_cost_eth = 90897
    claim_cost_rdn = TokenAmount(int(claim_cost_eth / rdn_per_eth))
    ious = list(
        get_claimable_ious(
            database,
            expires_before=web3.eth.blockNumber + expires_within,
            claim_cost_rdn=claim_cost_rdn,
        ))
    print(f"Found {len(ious)} claimable IOUs")
    _, failures = claim_ious(ious, claim_cost_rdn, one_to_n_contract, web3,
                             database)
    if failures:
        sys.exit(1)
Example #5
def get_claimable_ious(
    database: PFSDatabase, expires_before: BlockNumber, claim_cost_rdn: TokenAmount
) -> Iterable[IOU]:
    ious = database.get_ious(
        claimed=False, expires_before=expires_before, amount_at_least=claim_cost_rdn
    )
    return ious
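
get_claimable_ious simply forwards its filter to PFSDatabase.get_ious. As a rough, self-contained illustration of what that filter selects (the IOU fields are assumed from the surrounding examples, and the exact boundary semantics live in the database query):

from dataclasses import dataclass


@dataclass
class SimpleIOU:
    amount: int
    expiration_block: int
    claimed: bool


def claimable(ious, expires_before, claim_cost_rdn):
    # unclaimed, expiring before the given block, and large enough to cover the claim cost
    return [
        iou
        for iou in ious
        if not iou.claimed
        and iou.expiration_block < expires_before
        and iou.amount >= claim_cost_rdn
    ]


ious = [
    SimpleIOU(amount=200, expiration_block=90, claimed=False),   # claimable
    SimpleIOU(amount=50, expiration_block=90, claimed=False),    # too small to claim profitably
    SimpleIOU(amount=500, expiration_block=999, claimed=False),  # expires too far in the future
]
print(claimable(ious, expires_before=100, claim_cost_rdn=100))   # only the first IOU remains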
Example #6
def claim_ious(
    ious: Iterable[IOU],
    claim_cost_rdn: TokenAmount,
    one_to_n_contract: Contract,
    web3: Web3,
    database: PFSDatabase,
) -> Tuple[int, int]:
    unchecked_txs = []
    skipped = 0
    for iou in ious:
        claim = one_to_n_contract.functions.claim(
            sender=iou.sender,
            receiver=iou.receiver,
            amount=iou.amount,
            expiration_block=iou.expiration_block,
            one_to_n_address=iou.one_to_n_address,
            signature=iou.signature,
        )
        transferrable = claim.call()
        if transferrable < claim_cost_rdn:
            print("Not enough user deposit to claim profitably for", iou)
            skipped += 1
            continue
        tx_hash = claim.transact()
        unchecked_txs.append((tx_hash, iou))

    failures = 0
    while unchecked_txs:
        for tx_hash, iou in unchecked_txs:
            try:
                receipt = web3.eth.getTransactionReceipt(tx_hash)
            except TransactionNotFound:
                continue

            unchecked_txs.remove((tx_hash, iou))
            if receipt["status"] == 1:
                print(f"Successfully claimed {iou}.")
                iou.claimed = True
                database.upsert_iou(iou)
            else:
                print(f"Claiming {iou} failed!")
                failures += 1

    return skipped, failures
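
The second half of claim_ious keeps re-scanning the pending transactions until every receipt has appeared, dropping entries as they resolve. A minimal, self-contained sketch of that polling pattern (the fake lookup stands in for web3.eth.getTransactionReceipt and resolves randomly; a real loop would also sleep between passes instead of hammering the node):

import random


def lookup(item):
    """Stand-in for a receipt query: None while still pending, then a boolean status."""
    return random.choice([None, True, False])


def drain_pending(pending):
    failures = 0
    while pending:
        for item in list(pending):  # iterate over a copy so removing below is safe
            result = lookup(item)
            if result is None:  # no receipt yet, retry on the next pass
                continue
            pending.remove(item)
            if not result:
                failures += 1
    return failures


print(drain_pending(["tx-1", "tx-2", "tx-3"]))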
Example #7
    def __init__(  # pylint: disable=too-many-arguments
        self,
        web3: Web3,
        contracts: Dict[str, Contract],
        private_key: str,
        db_filename: str,
        sync_start_block: BlockNumber = BlockNumber(0),
        required_confirmations: int = 8,
        poll_interval: float = 10,
    ):
        super().__init__()

        self.web3 = web3
        self.registry_address = contracts[CONTRACT_TOKEN_NETWORK_REGISTRY].address
        self.user_deposit_contract = contracts[CONTRACT_USER_DEPOSIT]
        self.chain_id = ChainID(int(web3.net.version))
        self.address = private_key_to_address(private_key)
        self._required_confirmations = required_confirmations
        self._poll_interval = poll_interval
        self._is_running = gevent.event.Event()

        self.database = PFSDatabase(
            filename=db_filename,
            pfs_address=self.address,
            sync_start_block=sync_start_block,
            token_network_registry_address=self.registry_address,
            chain_id=self.chain_id,
            user_deposit_contract_address=self.user_deposit_contract.address,
            allow_create=True,
        )
        self.token_networks = self._load_token_networks()

        try:
            self.matrix_listener = MatrixListener(
                private_key=private_key,
                chain_id=self.chain_id,
                callback=self.handle_message,
                service_room_suffix=PATH_FINDING_BROADCASTING_ROOM,
            )
        except ConnectionError as exc:
            log.critical("Could not connect to broadcasting system.", exc=exc)
            sys.exit(1)
Example #8
def get_claimable_ious(
    database: PFSDatabase,
    claimable_until_after: Timestamp,
    claimable_until_before: Timestamp,
    claim_cost_rdn: TokenAmount,
) -> Iterable[IOU]:
    return database.get_ious(
        claimed=False,
        claimable_until_after=claimable_until_after,
        claimable_until_before=claimable_until_before,
        amount_at_least=claim_cost_rdn,
    )
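
Example #8 is the later variant, which bounds the claim window with wall-clock timestamps (claimable_until_after/claimable_until_before) instead of block numbers. A minimal sketch of building such a window; the one-hour horizon is an arbitrary placeholder, and naive UTC datetimes are used to match the timezone-free timestamps seen elsewhere in these examples:

from datetime import datetime, timedelta

now = datetime.utcnow()
claimable_until_after = now                        # claim window still open...
claimable_until_before = now + timedelta(hours=1)  # ...but closing within the next hour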
Example #9
    def __init__(
        self,
        web3: Web3,
        contracts: Dict[str, Contract],
        private_key: str,
        db_filename: str,
        sync_start_block: BlockNumber = BlockNumber(0),
        required_confirmations: int = 8,
        poll_interval: float = 10,
        service_fee: int = 0,
    ):
        super().__init__()

        self.web3 = web3
        self.registry_address = contracts[CONTRACT_TOKEN_NETWORK_REGISTRY].address
        self.sync_start_block = sync_start_block
        self.required_confirmations = required_confirmations
        self.poll_interval = poll_interval
        self.chain_id = ChainID(int(web3.net.version))
        self.private_key = private_key
        self.address = private_key_to_address(private_key)
        self.service_fee = service_fee

        self.is_running = gevent.event.Event()
        self.token_networks: Dict[TokenNetworkAddress, TokenNetwork] = {}
        self.database = PFSDatabase(filename=db_filename, pfs_address=self.address)
        self.user_deposit_contract = contracts[CONTRACT_USER_DEPOSIT]

        self.last_known_block = 0
        self.blockchain_state = BlockchainState(
            chain_id=self.chain_id,
            token_network_registry_address=self.registry_address,
            monitor_contract_address=Address(''),  # FIXME
            latest_known_block=self.sync_start_block,
            token_network_addresses=[],
        )
        log.info(
            'Listening to token network registry',
            registry_address=self.registry_address,
            start_block=sync_start_block,
        )

        try:
            self.matrix_listener = MatrixListener(
                private_key=private_key,
                chain_id=self.chain_id,
                callback=self.handle_message,
                service_room_suffix=PATH_FINDING_BROADCASTING_ROOM,
            )
        except ConnectionError as e:
            log.critical('Could not connect to broadcasting system.', exc=e)
            sys.exit(1)
Example #10
class PathfindingService(gevent.Greenlet):
    # pylint: disable=too-many-instance-attributes
    def __init__(  # pylint: disable=too-many-arguments
        self,
        web3: Web3,
        contracts: Dict[str, Contract],
        private_key: str,
        db_filename: str,
        sync_start_block: BlockNumber = BlockNumber(0),
        required_confirmations: int = 8,
        poll_interval: float = 10,
    ):
        super().__init__()

        self.web3 = web3
        self.registry_address = contracts[CONTRACT_TOKEN_NETWORK_REGISTRY].address
        self.user_deposit_contract = contracts[CONTRACT_USER_DEPOSIT]
        self.chain_id = ChainID(int(web3.net.version))
        self.address = private_key_to_address(private_key)
        self._required_confirmations = required_confirmations
        self._poll_interval = poll_interval
        self._is_running = gevent.event.Event()

        log.info("PFS payment address", address=self.address)

        self.blockchain_state = BlockchainState(
            latest_known_block=BlockNumber(0),
            token_network_registry_address=self.registry_address,
            chain_id=self.chain_id,
        )

        self.database = PFSDatabase(
            filename=db_filename,
            pfs_address=self.address,
            sync_start_block=sync_start_block,
            token_network_registry_address=self.registry_address,
            chain_id=self.chain_id,
            user_deposit_contract_address=self.user_deposit_contract.address,
            allow_create=True,
        )

        self.matrix_listener = MatrixListener(
            private_key=private_key,
            chain_id=self.chain_id,
            service_room_suffix=PATH_FINDING_BROADCASTING_ROOM,
            message_received_callback=self.handle_message,
            address_reachability_changed_callback=self.handle_reachability_change,
        )

        self.address_to_reachability: Dict[Address, AddressReachability] = dict()
        self.token_networks = self._load_token_networks()

    def _load_token_networks(self) -> Dict[TokenNetworkAddress, TokenNetwork]:
        network_for_address = {
            n.address: n
            for n in self.database.get_token_networks()
        }
        channel_views = self.database.get_channel_views()
        for cv in channel_views:
            network_for_address[cv.token_network_address].add_channel_view(cv)

            # Register channel participants for presence tracking
            self.matrix_listener.follow_address_presence(cv.participant1)
            self.matrix_listener.follow_address_presence(cv.participant2)

        return network_for_address

    def _run(self) -> None:  # pylint: disable=method-hidden
        register_error_handler()
        try:
            self.matrix_listener.start()
        except ConnectionError as exc:
            log.critical("Could not connect to broadcasting system.", exc=exc)
            sys.exit(1)

        log.info(
            "Listening to token network registry",
            registry_address=self.registry_address,
            start_block=self.database.get_latest_known_block(),
        )
        while not self._is_running.is_set():
            last_confirmed_block = self.web3.eth.blockNumber - self._required_confirmations

            max_query_interval_end_block = (
                self.database.get_latest_known_block() + MAX_FILTER_INTERVAL)
            # Limit the max number of blocks that is processed per iteration
            last_block = min(last_confirmed_block,
                             max_query_interval_end_block)

            self._process_new_blocks(last_block)

            try:
                gevent.sleep(self._poll_interval)
            except KeyboardInterrupt:
                log.info("Shutting down")
                sys.exit(0)

    def _process_new_blocks(self, last_block: BlockNumber) -> None:
        self.blockchain_state.latest_known_block = self.database.get_latest_known_block()
        self.blockchain_state.token_network_addresses = list(
            self.token_networks.keys())

        _, events = get_blockchain_events(
            web3=self.web3,
            contract_manager=CONTRACT_MANAGER,
            chain_state=self.blockchain_state,
            to_block=last_block,
        )
        for event in events:
            self.handle_event(event)

    def stop(self) -> None:
        self.matrix_listener.stop()
        self._is_running.set()
        self.matrix_listener.join()

    def follows_token_network(
            self, token_network_address: TokenNetworkAddress) -> bool:
        """ Checks if a token network is followed by the pathfinding service. """
        return token_network_address in self.token_networks.keys()

    def handle_reachability_change(self, address: Address,
                                   reachability: AddressReachability) -> None:
        self.address_to_reachability[address] = reachability

    def get_token_network(
            self, token_network_address: TokenNetworkAddress
    ) -> Optional[TokenNetwork]:
        """ Returns the `TokenNetwork` for the given address or `None` for unknown networks. """
        return self.token_networks.get(token_network_address)

    def handle_event(self, event: Event) -> None:
        if isinstance(event, ReceiveTokenNetworkCreatedEvent):
            self.handle_token_network_created(event)
        elif isinstance(event, ReceiveChannelOpenedEvent):
            self.handle_channel_opened(event)
        elif isinstance(event, ReceiveChannelNewDepositEvent):
            self.handle_channel_new_deposit(event)
        elif isinstance(event, ReceiveChannelClosedEvent):
            self.handle_channel_closed(event)
        elif isinstance(event, UpdatedHeadBlockEvent):
            self.database.update_lastest_known_block(event.head_block_number)
        else:
            log.debug("Unhandled event", evt=event)

    def handle_token_network_created(
            self, event: ReceiveTokenNetworkCreatedEvent) -> None:
        network_address = TokenNetworkAddress(event.token_network_address)
        if not self.follows_token_network(network_address):
            log.info("Found new token network", event_=event)

            self.token_networks[network_address] = TokenNetwork(
                network_address)
            self.database.upsert_token_network(network_address)

    def handle_channel_opened(self, event: ReceiveChannelOpenedEvent) -> None:
        token_network = self.get_token_network(event.token_network_address)
        if token_network is None:
            return

        log.info("Received ChannelOpened event", event_=event)

        self.matrix_listener.follow_address_presence(event.participant1,
                                                     refresh=True)
        self.matrix_listener.follow_address_presence(event.participant2,
                                                     refresh=True)

        channel_views = token_network.handle_channel_opened_event(
            channel_identifier=event.channel_identifier,
            participant1=event.participant1,
            participant2=event.participant2,
            settle_timeout=event.settle_timeout,
        )
        for cv in channel_views:
            self.database.upsert_channel_view(cv)

        # Handle messages for this channel which were received before ChannelOpened
        with self.database.conn:
            for message in self.database.pop_waiting_messages(
                    token_network_address=token_network.address,
                    channel_id=event.channel_identifier):
                self.handle_message(message)

    def handle_channel_new_deposit(
            self, event: ReceiveChannelNewDepositEvent) -> None:
        token_network = self.get_token_network(event.token_network_address)
        if token_network is None:
            return

        log.info("Received ChannelNewDeposit event", event_=event)

        channel_view = token_network.handle_channel_new_deposit_event(
            channel_identifier=event.channel_identifier,
            receiver=event.participant_address,
            total_deposit=event.total_deposit,
        )
        if channel_view:
            self.database.upsert_channel_view(channel_view)

    def handle_channel_closed(self, event: ReceiveChannelClosedEvent) -> None:
        token_network = self.get_token_network(event.token_network_address)
        if token_network is None:
            return

        log.info("Received ChannelClosed event", event_=event)

        token_network.handle_channel_closed_event(
            channel_identifier=event.channel_identifier)
        self.database.delete_channel_views(event.channel_identifier)

    def handle_message(self, message: Message) -> None:
        try:
            if isinstance(message, PFSCapacityUpdate):
                changed_cvs = self.on_capacity_update(message)
            elif isinstance(message, PFSFeeUpdate):
                changed_cvs = self.on_fee_update(message)
            else:
                log.debug("Ignoring message", message=message)
                return

            for cv in changed_cvs:
                self.database.upsert_channel_view(cv)

        except DeferMessage as ex:
            self.defer_message_until_channel_is_open(ex.deferred_message)
        except InvalidGlobalMessage as ex:
            log.info(str(ex), **asdict(message))

    def defer_message_until_channel_is_open(self,
                                            message: DeferableMessage) -> None:
        log.debug(
            "Received message for unknown channel, defer until ChannelOpened is confirmed",
            channel_id=message.canonical_identifier.channel_identifier,
            message=message,
        )
        self.database.insert_waiting_message(message)

    def on_fee_update(self, message: PFSFeeUpdate) -> List[ChannelView]:
        if message.sender != message.updating_participant:
            raise InvalidPFSFeeUpdate(
                "Invalid sender recovered from signature in PFSFeeUpdate")

        token_network = self.get_token_network(
            message.canonical_identifier.token_network_address)
        if not token_network:
            return []

        log.debug("Received Fee Update", message=message)

        if (message.canonical_identifier.channel_identifier
                not in token_network.channel_id_to_addresses):
            raise DeferMessage(message)

        return token_network.handle_channel_fee_update(message)

    def _validate_pfs_capacity_update(
            self, message: PFSCapacityUpdate) -> TokenNetwork:
        token_network_address = TokenNetworkAddress(
            message.canonical_identifier.token_network_address)

        # check if chain_id matches
        if message.canonical_identifier.chain_identifier != self.chain_id:
            raise InvalidCapacityUpdate(
                "Received Capacity Update with unknown chain identifier")

        # check if token network exists
        token_network = self.get_token_network(token_network_address)
        if token_network is None:
            raise InvalidCapacityUpdate(
                "Received Capacity Update with unknown token network")

        # check values < max int 256
        if message.updating_capacity > UINT256_MAX:
            raise InvalidCapacityUpdate(
                "Received Capacity Update with impossible updating_capacity")
        if message.other_capacity > UINT256_MAX:
            raise InvalidCapacityUpdate(
                "Received Capacity Update with impossible other_capacity")

        # check signature of Capacity Update
        if message.sender != message.updating_participant:
            raise InvalidCapacityUpdate("Capacity Update not signed correctly")

        # check if channel exists
        channel_identifier = message.canonical_identifier.channel_identifier
        if channel_identifier not in token_network.channel_id_to_addresses:
            raise DeferMessage(message)

        # check if participants fit to channel id
        participants = token_network.channel_id_to_addresses[
            channel_identifier]
        if message.updating_participant not in participants:
            raise InvalidCapacityUpdate(
                "Sender of Capacity Update does not match the internal channel"
            )
        if message.other_participant not in participants:
            raise InvalidCapacityUpdate(
                "Other Participant of Capacity Update does not match the internal channel"
            )

        return token_network

    def on_capacity_update(self,
                           message: PFSCapacityUpdate) -> List[ChannelView]:
        token_network = self._validate_pfs_capacity_update(message)
        log.debug("Received Capacity Update", message=message)
        self.database.upsert_capacity_update(message)

        # Follow presence for the channel participants
        self.matrix_listener.follow_address_presence(
            message.updating_participant, refresh=True)
        self.matrix_listener.follow_address_presence(message.other_participant,
                                                     refresh=True)

        updating_capacity_partner, other_capacity_partner = self.database.get_capacity_updates(
            updating_participant=message.other_participant,
            token_network_address=TokenNetworkAddress(
                message.canonical_identifier.token_network_address),
            channel_id=message.canonical_identifier.channel_identifier,
        )
        return token_network.handle_channel_balance_update_message(
            message=message,
            updating_capacity_partner=updating_capacity_partner,
            other_capacity_partner=other_capacity_partner,
        )
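
All the service variants above share the same gevent skeleton: subclass gevent.Greenlet, poll inside _run until an Event is set, and expose stop() to break the loop. A stripped-down, self-contained sketch of just that skeleton (the interval and the per-iteration work are placeholders, not taken from the service):

import gevent
import gevent.event


class Poller(gevent.Greenlet):
    def __init__(self, poll_interval: float = 0.1):
        super().__init__()
        self._poll_interval = poll_interval
        self._is_running = gevent.event.Event()

    def _run(self):  # invoked by Greenlet.start()
        while not self._is_running.is_set():
            print("polling...")  # stands in for _process_new_blocks()
            gevent.sleep(self._poll_interval)

    def stop(self):
        self._is_running.set()


poller = Poller()
poller.start()
gevent.sleep(0.35)  # let a few iterations run
poller.stop()
poller.join()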
Example #11
class PathfindingService(gevent.Greenlet):
    # pylint: disable=too-many-instance-attributes
    def __init__(  # pylint: disable=too-many-arguments
        self,
        web3: Web3,
        contracts: Dict[str, Contract],
        private_key: PrivateKey,
        db_filename: str,
        sync_start_block: BlockNumber,
        required_confirmations: BlockTimeout,
        poll_interval: float,
        matrix_servers: Optional[List[str]] = None,
    ):
        super().__init__()

        self.web3 = web3
        self.registry_address = contracts[CONTRACT_TOKEN_NETWORK_REGISTRY].address
        self.user_deposit_contract = contracts[CONTRACT_USER_DEPOSIT]
        self.service_token_address = self.user_deposit_contract.functions.token().call()
        self.chain_id = ChainID(web3.eth.chainId)
        self.address = private_key_to_address(private_key)
        self.required_confirmations = required_confirmations
        self._poll_interval = poll_interval
        self._is_running = gevent.event.Event()

        log.info("PFS payment address", address=self.address)

        self.database = PFSDatabase(
            filename=db_filename,
            pfs_address=self.address,
            sync_start_block=sync_start_block,
            token_network_registry_address=to_canonical_address(
                self.registry_address),
            chain_id=self.chain_id,
            user_deposit_contract_address=to_canonical_address(
                self.user_deposit_contract.address),
            allow_create=True,
        )

        self.blockchain_state = BlockchainState(
            latest_committed_block=self.database.get_latest_committed_block(),
            token_network_registry_address=to_canonical_address(
                self.registry_address),
            chain_id=self.chain_id,
        )

        self.matrix_listener = MatrixListener(
            private_key=private_key,
            chain_id=self.chain_id,
            device_id=DeviceIDs.PFS,
            message_received_callback=self.handle_message,
            servers=matrix_servers,
        )

        self.token_networks = self._load_token_networks()
        self.updated = gevent.event.Event()  # set whenever blocks are processed
        self.startup_finished = gevent.event.AsyncResult()

        self._init_metrics()

    def _init_metrics(self) -> None:
        def _get_number_of_claimed_ious() -> float:
            return float(self.database.get_nof_claimed_ious())

        def _get_total_amount_of_claimed_ious() -> float:
            return float(sum(iou.amount for iou in self._iter_claimed_ious()))

        metrics.get_metrics_for_label(
            metrics.IOU_CLAIMS, metrics.IouStatus.SUCCESSFUL).set_function(
                _get_number_of_claimed_ious)
        metrics.get_metrics_for_label(
            metrics.IOU_CLAIMS_TOKEN,
            metrics.IouStatus.SUCCESSFUL).set_function(
                _get_total_amount_of_claimed_ious)

    def _iter_claimed_ious(self) -> Iterator[IOU]:
        return self.database.get_ious(claimed=True)

    def _load_token_networks(self) -> Dict[TokenNetworkAddress, TokenNetwork]:
        network_for_address = {
            n.address: n
            for n in self.database.get_token_networks()
        }
        for channel in self.database.get_channels():
            for cv in channel.views:
                network_for_address[cv.token_network_address].add_channel_view(
                    cv)

        return network_for_address

    def _run(self) -> None:  # pylint: disable=method-hidden
        try:
            self.matrix_listener.start()
        except (Timeout, ConnectionError) as exc:
            log.critical("Could not connect to broadcasting system.", exc=exc)
            sys.exit(1)

        self.matrix_listener.link(self.startup_finished)
        try:
            self.matrix_listener.startup_finished.get(
                timeout=MATRIX_START_TIMEOUT)
        except Timeout:
            raise Exception("MatrixListener did not start in time.")
        self.startup_finished.set()

        log.info(
            "Listening to token network registry",
            registry_address=self.registry_address,
            start_block=self.database.get_latest_committed_block(),
        )
        while not self._is_running.is_set():
            self._process_new_blocks(
                BlockNumber(self.web3.eth.blockNumber -
                            self.required_confirmations))

            # Let tests waiting for this event know that we're done with processing
            self.updated.set()
            self.updated.clear()

            # Sleep, then collect errors from greenlets
            gevent.sleep(self._poll_interval)
            gevent.joinall({self.matrix_listener}, timeout=0, raise_error=True)

    def _process_new_blocks(self, latest_confirmed_block: BlockNumber) -> None:
        start = time.monotonic()

        db_block = self.database.get_latest_committed_block()
        assert db_block == self.blockchain_state.latest_committed_block, (
            f"Unexpected `latest_committed_block` in db: "
            f"was {db_block}, expected {self.blockchain_state.latest_committed_block}. "
            f"Is the db accidentally shared by two PFSes?")

        events = get_blockchain_events_adaptive(
            web3=self.web3,
            blockchain_state=self.blockchain_state,
            token_network_addresses=list(self.token_networks.keys()),
            latest_confirmed_block=latest_confirmed_block,
        )

        if events is None:
            return

        before_process = time.monotonic()
        for event in events:
            self.handle_event(event)
            gevent.idle()  # Allow answering requests in between events

        if events:
            log.info(
                "Processed events",
                getting=round(before_process - start, 2),
                processing=round(time.monotonic() - before_process, 2),
                total_duration=round(time.monotonic() - start, 2),
                event_counts=collections.Counter(e.__class__.__name__
                                                 for e in events),
            )

    def stop(self) -> None:
        self.matrix_listener.kill()
        self._is_running.set()
        self.matrix_listener.join()

    def follows_token_network(
            self, token_network_address: TokenNetworkAddress) -> bool:
        """Checks if a token network is followed by the pathfinding service."""
        return token_network_address in self.token_networks.keys()

    def get_token_network(
            self, token_network_address: TokenNetworkAddress
    ) -> Optional[TokenNetwork]:
        """Returns the `TokenNetwork` for the given address or `None` for unknown networks."""
        return self.token_networks.get(token_network_address)

    def handle_event(self, event: Event) -> None:
        with sentry_sdk.configure_scope() as scope:
            with metrics.collect_event_metrics(event):
                scope.set_extra("event", event)
                if isinstance(event, ReceiveTokenNetworkCreatedEvent):
                    self.handle_token_network_created(event)
                elif isinstance(event, ReceiveChannelOpenedEvent):
                    self.handle_channel_opened(event)
                elif isinstance(event, ReceiveChannelClosedEvent):
                    self.handle_channel_closed(event)
                elif isinstance(event, UpdatedHeadBlockEvent):
                    # TODO: Store blockhash here as well
                    self.blockchain_state.latest_committed_block = event.head_block_number
                    self.database.update_lastest_committed_block(
                        event.head_block_number)
                else:
                    log.debug("Unhandled event", evt=event)

    def handle_token_network_created(
            self, event: ReceiveTokenNetworkCreatedEvent) -> None:
        network_address = event.token_network_address
        if not self.follows_token_network(network_address):
            log.info("Found new token network", event_=event)

            self.token_networks[network_address] = TokenNetwork(
                network_address)
            self.database.upsert_token_network(network_address)

    def handle_channel_opened(self, event: ReceiveChannelOpenedEvent) -> None:
        token_network = self.get_token_network(event.token_network_address)
        if token_network is None:
            return

        log.info("Received ChannelOpened event", event_=event)

        channel = token_network.handle_channel_opened_event(
            channel_identifier=event.channel_identifier,
            participant1=event.participant1,
            participant2=event.participant2,
            settle_timeout=event.settle_timeout,
        )
        self.database.upsert_channel(channel)

        # Handle messages for this channel which were received before ChannelOpened
        with self.database.conn:
            for message in self.database.pop_waiting_messages(
                    token_network_address=token_network.address,
                    channel_id=event.channel_identifier):
                log.debug("Processing deferred message", message=message)
                self.handle_message(message)

    def handle_channel_closed(self, event: ReceiveChannelClosedEvent) -> None:
        token_network = self.get_token_network(event.token_network_address)
        if token_network is None:
            return

        log.info("Received ChannelClosed event", event_=event)

        channel_deleted = self.database.delete_channel(
            event.token_network_address, event.channel_identifier)
        if channel_deleted:
            token_network.handle_channel_closed_event(event.channel_identifier)
        else:
            log.error(
                "Received ChannelClosed event for unknown channel",
                token_network_address=event.token_network_address,
                channel_identifier=event.channel_identifier,
            )
            metrics.get_metrics_for_label(metrics.ERRORS_LOGGED,
                                          metrics.ErrorCategory.STATE).inc()

    def handle_message(self, message: Message) -> None:
        with sentry_sdk.configure_scope() as scope:
            scope.set_extra("message", message)
            try:
                with metrics.collect_message_metrics(message):
                    if isinstance(message, PFSCapacityUpdate):
                        changed_channel: Optional[Channel] = self.on_capacity_update(message)
                    elif isinstance(message, PFSFeeUpdate):
                        changed_channel = self.on_fee_update(message)
                    else:
                        log.debug("Ignoring message", unknown_message=message)
                        return

                    if changed_channel:
                        self.database.upsert_channel(changed_channel)

            except DeferMessage as ex:
                self.defer_message_until_channel_is_open(ex.deferred_message)
            except InvalidGlobalMessage as ex:
                log.info(str(ex), **asdict(message))

    def defer_message_until_channel_is_open(self,
                                            message: DeferableMessage) -> None:
        log.debug(
            "Received message for unknown channel, defer until ChannelOpened is confirmed",
            channel_id=message.canonical_identifier.channel_identifier,
            message=message,
        )
        self.database.insert_waiting_message(message)

    def _validate_pfs_fee_update(self, message: PFSFeeUpdate) -> TokenNetwork:
        # check if chain_id matches
        if message.canonical_identifier.chain_identifier != self.chain_id:
            raise InvalidFeeUpdate(
                "Received Fee Update with unknown chain identifier")

        # check if token network exists
        token_network = self.get_token_network(
            message.canonical_identifier.token_network_address)
        if token_network is None:
            raise InvalidFeeUpdate(
                "Received Fee Update with unknown token network")

        # check signature of Capacity Update
        if message.sender != message.updating_participant:
            raise InvalidFeeUpdate("Fee Update not signed correctly")

        # check if channel exists
        channel_identifier = message.canonical_identifier.channel_identifier
        if channel_identifier not in token_network.channel_id_to_addresses:
            raise DeferMessage(message)

        # check if participants fit to channel id
        participants = token_network.channel_id_to_addresses[
            channel_identifier]
        if message.updating_participant not in participants:
            raise InvalidFeeUpdate(
                "Sender of Fee Update does not match the internal channel")

        # check that timestamp has no timezone
        if message.timestamp.tzinfo is not None:
            raise InvalidFeeUpdate(
                "Timestamp of Fee Update should not contain timezone")

        return token_network

    def on_fee_update(self, message: PFSFeeUpdate) -> Optional[Channel]:
        token_network = self._validate_pfs_fee_update(message)
        log.debug("Received Fee Update", message=message)

        return token_network.handle_channel_fee_update(message)

    def _validate_pfs_capacity_update(
            self, message: PFSCapacityUpdate) -> TokenNetwork:
        # check if chain_id matches
        if message.canonical_identifier.chain_identifier != self.chain_id:
            raise InvalidCapacityUpdate(
                "Received Capacity Update with unknown chain identifier")

        # check if token network exists
        token_network = self.get_token_network(
            message.canonical_identifier.token_network_address)
        if token_network is None:
            raise InvalidCapacityUpdate(
                "Received Capacity Update with unknown token network")

        # check values < max int 256
        if message.updating_capacity > UINT256_MAX:
            raise InvalidCapacityUpdate(
                "Received Capacity Update with impossible updating_capacity")
        if message.other_capacity > UINT256_MAX:
            raise InvalidCapacityUpdate(
                "Received Capacity Update with impossible other_capacity")

        # check signature of Capacity Update
        if message.sender != message.updating_participant:
            raise InvalidCapacityUpdate("Capacity Update not signed correctly")

        # check if channel exists
        channel_identifier = message.canonical_identifier.channel_identifier
        if channel_identifier not in token_network.channel_id_to_addresses:
            raise DeferMessage(message)

        # check if participants fit to channel id
        participants = token_network.channel_id_to_addresses[
            channel_identifier]
        if message.updating_participant not in participants:
            raise InvalidCapacityUpdate(
                "Sender of Capacity Update does not match the internal channel"
            )
        if message.other_participant not in participants:
            raise InvalidCapacityUpdate(
                "Other Participant of Capacity Update does not match the internal channel"
            )

        return token_network

    def on_capacity_update(self, message: PFSCapacityUpdate) -> Channel:
        token_network = self._validate_pfs_capacity_update(message)
        log.debug("Received Capacity Update", message=message)
        self.database.upsert_capacity_update(message)

        updating_capacity_partner, other_capacity_partner = self.database.get_capacity_updates(
            updating_participant=message.other_participant,
            token_network_address=TokenNetworkAddress(
                message.canonical_identifier.token_network_address),
            channel_id=message.canonical_identifier.channel_identifier,
        )
        return token_network.handle_channel_balance_update_message(
            message=message,
            updating_capacity_partner=updating_capacity_partner,
            other_capacity_partner=other_capacity_partner,
        )
Example #12
    def __init__(
            self,
            web3: Web3,
            contract_manager: ContractManager,
            registry_address: Address,
            private_key: str,
            db_filename: str,
            user_deposit_contract_address: Address,
            sync_start_block: int = 0,
            required_confirmations: int = 8,
            poll_interval: int = 10,
            service_fee: int = 0,
    ):
        """ Creates a new pathfinding service

        Args:
            contract_manager: A contract manager
            token_network_listener: A blockchain listener object
            token_network_registry_listener: A blockchain listener object for the network registry
            chain_id: The id of the chain the PFS runs on
        """
        super().__init__()

        self.web3 = web3
        self.contract_manager = contract_manager
        self.registry_address = registry_address
        self.sync_start_block = sync_start_block
        self.required_confirmations = required_confirmations
        self.poll_interval = poll_interval
        self.chain_id = int(web3.net.version)
        self.private_key = private_key
        self.address = private_key_to_address(private_key)
        self.service_fee = service_fee

        self.is_running = gevent.event.Event()
        self.token_networks: Dict[Address, TokenNetwork] = {}
        self.token_network_listeners: List[BlockchainListener] = []
        self.database = PFSDatabase(
            filename=db_filename,
            pfs_address=self.address,
        )
        self.user_deposit_contract = web3.eth.contract(
            abi=self.contract_manager.get_contract_abi(
                CONTRACT_USER_DEPOSIT,
            ),
            address=user_deposit_contract_address,
        )

        log.info(
            'Starting TokenNetworkRegistry Listener',
            required_confirmations=self.required_confirmations,
        )
        self.token_network_registry_listener = BlockchainListener(
            web3=web3,
            contract_manager=self.contract_manager,
            contract_name=CONTRACT_TOKEN_NETWORK_REGISTRY,
            contract_address=self.registry_address,
            required_confirmations=self.required_confirmations,
            poll_interval=self.poll_interval,
            sync_start_block=self.sync_start_block,
        )
        log.info(
            'Listening to token network registry',
            registry_address=registry_address,
            start_block=sync_start_block,
        )
        self._setup_token_networks()

        try:
            self.matrix_listener = MatrixListener(
                private_key=private_key,
                chain_id=self.chain_id,
                callback=self.handle_message,
                service_room_suffix=PATH_FINDING_BROADCASTING_ROOM,
            )
        except ConnectionError as e:
            log.critical(
                'Could not connect to broadcasting system.',
                exc=e,
            )
            sys.exit(1)
Example #13
class PathfindingService(gevent.Greenlet):
    # pylint: disable=too-many-instance-attributes
    def __init__(  # pylint: disable=too-many-arguments
        self,
        web3: Web3,
        contracts: Dict[str, Contract],
        private_key: str,
        db_filename: str,
        sync_start_block: BlockNumber = BlockNumber(0),
        required_confirmations: int = 8,
        poll_interval: float = 10,
    ):
        super().__init__()

        self.web3 = web3
        self.registry_address = contracts[CONTRACT_TOKEN_NETWORK_REGISTRY].address
        self.user_deposit_contract = contracts[CONTRACT_USER_DEPOSIT]
        self.chain_id = ChainID(int(web3.net.version))
        self.address = private_key_to_address(private_key)
        self._required_confirmations = required_confirmations
        self._poll_interval = poll_interval
        self._is_running = gevent.event.Event()

        self.database = PFSDatabase(
            filename=db_filename,
            pfs_address=self.address,
            sync_start_block=sync_start_block,
            token_network_registry_address=self.registry_address,
            chain_id=self.chain_id,
            user_deposit_contract_address=self.user_deposit_contract.address,
            allow_create=True,
        )
        self.token_networks = self._load_token_networks()

        try:
            self.matrix_listener = MatrixListener(
                private_key=private_key,
                chain_id=self.chain_id,
                callback=self.handle_message,
                service_room_suffix=PATH_FINDING_BROADCASTING_ROOM,
            )
        except ConnectionError as exc:
            log.critical("Could not connect to broadcasting system.", exc=exc)
            sys.exit(1)

    def _load_token_networks(self) -> Dict[TokenNetworkAddress, TokenNetwork]:
        network_for_address = {n.address: n for n in self.database.get_token_networks()}
        channel_views = self.database.get_channel_views()
        for cv in channel_views:
            network_for_address[cv.token_network_address].add_channel_view(cv)
        return network_for_address

    def _run(self) -> None:  # pylint: disable=method-hidden
        register_error_handler()
        self.matrix_listener.start()

        log.info(
            "Listening to token network registry",
            registry_address=self.registry_address,
            start_block=self.database.get_latest_known_block(),
        )
        while not self._is_running.is_set():
            last_confirmed_block = self.web3.eth.blockNumber - self._required_confirmations

            max_query_interval_end_block = (
                self.database.get_latest_known_block() + MAX_FILTER_INTERVAL
            )
            # Limit the max number of blocks that is processed per iteration
            last_block = min(last_confirmed_block, max_query_interval_end_block)

            self._process_new_blocks(last_block)

            try:
                gevent.sleep(self._poll_interval)
            except KeyboardInterrupt:
                log.info("Shutting down")
                sys.exit(0)

    def _process_new_blocks(self, last_block: BlockNumber) -> None:
        _, events = get_blockchain_events(
            web3=self.web3,
            contract_manager=CONTRACT_MANAGER,
            chain_state=BlockchainState(
                latest_known_block=self.database.get_latest_known_block(),
                token_network_addresses=list(self.token_networks.keys()),
                token_network_registry_address=self.registry_address,
                monitor_contract_address=Address(""),  # FIXME
                chain_id=self.chain_id,
            ),
            to_block=last_block,
            query_ms=False,
        )
        for event in events:
            self.handle_event(event)

    def stop(self) -> None:
        self.matrix_listener.stop()
        self._is_running.set()
        self.matrix_listener.join()

    def follows_token_network(self, token_network_address: TokenNetworkAddress) -> bool:
        """ Checks if a token network is followed by the pathfinding service. """
        return token_network_address in self.token_networks.keys()

    def get_token_network(
        self, token_network_address: TokenNetworkAddress
    ) -> Optional[TokenNetwork]:
        """ Returns the `TokenNetwork` for the given address or `None` for unknown networks. """
        return self.token_networks.get(token_network_address)

    def handle_event(self, event: Event) -> None:
        if isinstance(event, ReceiveTokenNetworkCreatedEvent):
            self.handle_token_network_created(event)
        elif isinstance(event, ReceiveChannelOpenedEvent):
            self.handle_channel_opened(event)
        elif isinstance(event, ReceiveChannelNewDepositEvent):
            self.handle_channel_new_deposit(event)
        elif isinstance(event, ReceiveChannelClosedEvent):
            self.handle_channel_closed(event)
        elif isinstance(event, UpdatedHeadBlockEvent):
            self.database.update_lastest_known_block(event.head_block_number)
        else:
            log.debug("Unhandled event", evt=event)

    def handle_token_network_created(self, event: ReceiveTokenNetworkCreatedEvent) -> None:
        network_address = TokenNetworkAddress(event.token_network_address)
        if not self.follows_token_network(network_address):
            log.info("Found new token network", **asdict(event))

            self.token_networks[network_address] = TokenNetwork(network_address)
            self.database.upsert_token_network(network_address)

    def handle_channel_opened(self, event: ReceiveChannelOpenedEvent) -> None:
        token_network = self.get_token_network(event.token_network_address)
        if token_network is None:
            return

        log.info("Received ChannelOpened event", **asdict(event))

        channel_views = token_network.handle_channel_opened_event(
            channel_identifier=event.channel_identifier,
            participant1=event.participant1,
            participant2=event.participant2,
            settle_timeout=event.settle_timeout,
        )
        for cv in channel_views:
            self.database.upsert_channel_view(cv)

    def handle_channel_new_deposit(self, event: ReceiveChannelNewDepositEvent) -> None:
        token_network = self.get_token_network(event.token_network_address)
        if token_network is None:
            return

        log.info("Received ChannelNewDeposit event", **asdict(event))

        channel_view = token_network.handle_channel_new_deposit_event(
            channel_identifier=event.channel_identifier,
            receiver=event.participant_address,
            total_deposit=event.total_deposit,
        )
        if channel_view:
            self.database.upsert_channel_view(channel_view)

    def handle_channel_closed(self, event: ReceiveChannelClosedEvent) -> None:
        token_network = self.get_token_network(event.token_network_address)
        if token_network is None:
            return

        log.info("Received ChannelClosed event", **asdict(event))

        token_network.handle_channel_closed_event(channel_identifier=event.channel_identifier)
        self.database.delete_channel_views(event.channel_identifier)

    def handle_message(self, message: SignedMessage) -> None:
        if isinstance(message, UpdatePFS):
            try:
                self.on_pfs_update(message)
            except InvalidCapacityUpdate as x:
                log.info(str(x), **message.to_dict())
        else:
            log.info("Ignoring unknown message type")

    def _validate_pfs_update(self, message: UpdatePFS) -> TokenNetwork:
        token_network_address = to_checksum_address(
            message.canonical_identifier.token_network_address
        )

        updating_participant = to_checksum_address(message.updating_participant)
        other_participant = to_checksum_address(message.other_participant)

        # check if chain_id matches
        if message.canonical_identifier.chain_identifier != self.chain_id:
            raise InvalidCapacityUpdate("Received Capacity Update with unknown chain identifier")

        # check if token network exists
        token_network = self.get_token_network(token_network_address)
        if token_network is None:
            raise InvalidCapacityUpdate("Received Capacity Update with unknown token network")

        # check if channel exists
        channel_identifier = message.canonical_identifier.channel_identifier
        if channel_identifier not in token_network.channel_id_to_addresses:
            raise InvalidCapacityUpdate(
                "Received Capacity Update with unknown channel identifier in token network"
            )

        # check values < max int 256
        if message.updating_capacity > UINT256_MAX:
            raise InvalidCapacityUpdate(
                "Received Capacity Update with impossible updating_capacity"
            )
        if message.other_capacity > UINT256_MAX:
            raise InvalidCapacityUpdate("Received Capacity Update with impossible other_capacity")

        # check if participants fit to channel id
        participants = token_network.channel_id_to_addresses[channel_identifier]
        if updating_participant not in participants:
            raise InvalidCapacityUpdate(
                "Sender of Capacity Update does not match the internal channel"
            )
        if other_participant not in participants:
            raise InvalidCapacityUpdate(
                "Other Participant of Capacity Update does not match the internal channel"
            )

        # check signature of Capacity Update
        signer = to_checksum_address(message.sender)  # recover address from signature
        if not is_same_address(signer, updating_participant):
            raise InvalidCapacityUpdate("Capacity Update not signed correctly")

        # check if nonce is higher than current nonce
        view_to_partner, view_from_partner = token_network.get_channel_views_for_partner(
            channel_identifier=channel_identifier,
            updating_participant=updating_participant,
            other_participant=other_participant,
        )
        is_nonce_pair_known = (
            message.updating_nonce <= view_to_partner.update_nonce
            and message.other_nonce <= view_from_partner.update_nonce
        )
        if is_nonce_pair_known:
            raise InvalidCapacityUpdate("Capacity Update already received")

        return token_network

    def on_pfs_update(self, message: UpdatePFS) -> None:
        token_network = self._validate_pfs_update(message)
        log.info("Received Capacity Update", **message.to_dict())
        token_network.handle_channel_balance_update_message(message)
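
The nonce check at the end of _validate_pfs_update is what makes Capacity Updates idempotent: an update is only accepted if at least one of its two nonces advances past what is already stored for that channel. A self-contained restatement of that predicate with illustrative numbers:

def is_nonce_pair_known(updating_nonce, other_nonce, stored_to_partner, stored_from_partner):
    # True means the update (or a newer one) was already seen, so it gets rejected
    return updating_nonce <= stored_to_partner and other_nonce <= stored_from_partner


print(is_nonce_pair_known(5, 3, stored_to_partner=5, stored_from_partner=3))  # True  -> reject
print(is_nonce_pair_known(6, 3, stored_to_partner=5, stored_from_partner=3))  # False -> accept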