def __init__(  # pylint: disable=too-many-arguments
    self,
    web3: Web3,
    contracts: Dict[str, Contract],
    private_key: PrivateKey,
    db_filename: str,
    sync_start_block: BlockNumber,
    required_confirmations: BlockTimeout,
    poll_interval: float,
    matrix_servers: Optional[List[str]] = None,
):
    """Initialize the pathfinding service.

    Wires up the on-chain view (web3, contracts), the local database,
    the Matrix transport listener, and in-memory token network state.

    Args:
        web3: Connected Web3 instance used for all chain queries.
        contracts: Deployed contract proxies, keyed by contract name
            (must contain CONTRACT_TOKEN_NETWORK_REGISTRY and
            CONTRACT_USER_DEPOSIT).
        private_key: Service's private key; determines the payment address.
        db_filename: Path of the sqlite database file.
        sync_start_block: First block to sync events from when the
            database is freshly created.
        required_confirmations: Number of blocks to wait before an event
            is considered confirmed.
        poll_interval: Seconds between blockchain polling rounds.
        matrix_servers: Optional explicit list of Matrix homeservers;
            ``None`` lets the listener pick defaults.
    """
    super().__init__()
    self.web3 = web3
    self.registry_address = contracts[CONTRACT_TOKEN_NETWORK_REGISTRY].address
    self.user_deposit_contract = contracts[CONTRACT_USER_DEPOSIT]
    # The service token is whatever token the user deposit contract uses;
    # resolved with an on-chain call at startup.
    self.service_token_address = self.user_deposit_contract.functions.token().call()
    self.chain_id = ChainID(web3.eth.chainId)
    self.address = private_key_to_address(private_key)
    self.required_confirmations = required_confirmations
    self._poll_interval = poll_interval
    # Set to signal the polling loop to stop.
    self._is_running = gevent.event.Event()
    log.info("PFS payment address", address=self.address)
    # allow_create=True: a missing database file is created and initialized.
    self.database = PFSDatabase(
        filename=db_filename,
        pfs_address=self.address,
        sync_start_block=sync_start_block,
        token_network_registry_address=to_canonical_address(self.registry_address),
        chain_id=self.chain_id,
        user_deposit_contract_address=to_canonical_address(
            self.user_deposit_contract.address
        ),
        allow_create=True,
    )
    # Resume syncing from the last block committed to the database.
    self.blockchain_state = BlockchainState(
        latest_committed_block=self.database.get_latest_committed_block(),
        token_network_registry_address=to_canonical_address(self.registry_address),
        chain_id=self.chain_id,
    )
    self.matrix_listener = MatrixListener(
        private_key=private_key,
        chain_id=self.chain_id,
        device_id=DeviceIDs.PFS,
        message_received_callback=self.handle_message,
        servers=matrix_servers,
    )
    self.token_networks = self._load_token_networks()
    self.updated = gevent.event.Event()  # set whenever blocks are processed
    self.startup_finished = gevent.event.AsyncResult()
    self._init_metrics()
def get_blockchain_state(self) -> BlockchainState:
    """Read the persisted blockchain sync state from the database.

    Returns a ``BlockchainState`` built from the single row of the
    ``blockchain`` table.
    """
    row = self.conn.execute("SELECT * FROM blockchain").fetchone()
    return BlockchainState(
        chain_id=row["chain_id"],
        token_network_registry_address=row["token_network_registry_address"],
        monitor_contract_address=row["monitor_contract_address"],
        latest_committed_block=row["latest_committed_block"],
    )
def __init__(  # pylint: disable=too-many-arguments
    self,
    web3: Web3,
    contracts: Dict[str, Contract],
    private_key: str,
    db_filename: str,
    sync_start_block: BlockNumber = BlockNumber(0),
    required_confirmations: int = 8,
    poll_interval: float = 10,
    matrix_servers: List[str] = None,  # NOTE(review): implicitly Optional — confirm and consider Optional[List[str]]
):
    """Initialize the pathfinding service.

    Sets up the chain view, in-memory blockchain state, persistent
    database, and the Matrix listener used for message transport and
    presence tracking.

    Args:
        web3: Connected Web3 instance for all chain queries.
        contracts: Deployed contract proxies keyed by contract name.
        private_key: Service private key; determines the payment address.
        db_filename: Path of the sqlite database file.
        sync_start_block: First block to sync from on a fresh database.
        required_confirmations: Blocks to wait before an event counts as
            confirmed.
        poll_interval: Seconds between polling rounds.
        matrix_servers: Optional explicit Matrix homeserver list.
    """
    super().__init__()
    self.web3 = web3
    self.registry_address = contracts[CONTRACT_TOKEN_NETWORK_REGISTRY].address
    self.user_deposit_contract = contracts[CONTRACT_USER_DEPOSIT]
    self.chain_id = ChainID(int(web3.net.version))
    self.address = private_key_to_address(private_key)
    self.required_confirmations = required_confirmations
    self._poll_interval = poll_interval
    # Set to signal the polling loop to stop.
    self._is_running = gevent.event.Event()
    log.info("PFS payment address", address=self.address)
    # In-memory sync state starts at block 0; the database below keeps
    # the persistent counterpart.
    self.blockchain_state = BlockchainState(
        # NOTE(review): "latest_commited_block" (sic) — presumably matches the
        # misspelled field name on this version of BlockchainState; verify
        # against the dataclass before renaming.
        latest_commited_block=BlockNumber(0),
        token_network_registry_address=self.registry_address,
        chain_id=self.chain_id,
    )
    # allow_create=True: a missing database file is created and initialized.
    self.database = PFSDatabase(
        filename=db_filename,
        pfs_address=self.address,
        sync_start_block=sync_start_block,
        token_network_registry_address=self.registry_address,
        chain_id=self.chain_id,
        user_deposit_contract_address=self.user_deposit_contract.address,
        allow_create=True,
    )
    self.matrix_listener = MatrixListener(
        private_key=private_key,
        chain_id=self.chain_id,
        service_room_suffix=PATH_FINDING_BROADCASTING_ROOM,
        message_received_callback=self.handle_message,
        address_reachability_changed_callback=self.handle_reachability_change,
        servers=matrix_servers,
    )
    # Latest known presence per address, updated by the reachability callback.
    self.address_to_reachability: Dict[Address, AddressReachability] = dict()
    self.token_networks = self._load_token_networks()
def __init__(
    self,
    web3: Web3,
    contracts: Dict[str, Contract],
    private_key: str,
    db_filename: str,
    sync_start_block: BlockNumber = BlockNumber(0),
    required_confirmations: int = 8,
    poll_interval: float = 10,
    service_fee: int = 0,
):
    """Initialize the pathfinding service.

    Sets up chain access, the local database, an initial (empty)
    blockchain state, and the Matrix listener. Exits the process if the
    broadcasting system cannot be reached.

    Args:
        web3: Connected Web3 instance for all chain queries.
        contracts: Deployed contract proxies keyed by contract name.
        private_key: Service private key; determines the service address.
        db_filename: Path of the sqlite database file.
        sync_start_block: First block to sync events from.
        required_confirmations: Blocks to wait before an event counts as
            confirmed.
        poll_interval: Seconds between polling rounds.
        service_fee: Fee charged per served request.
    """
    super().__init__()
    self.web3 = web3
    self.registry_address = contracts[CONTRACT_TOKEN_NETWORK_REGISTRY].address
    self.sync_start_block = sync_start_block
    self.required_confirmations = required_confirmations
    self.poll_interval = poll_interval
    self.chain_id = ChainID(int(web3.net.version))
    self.private_key = private_key
    self.address = private_key_to_address(private_key)
    self.service_fee = service_fee
    # Set to signal the polling loop to stop.
    self.is_running = gevent.event.Event()
    self.token_networks: Dict[TokenNetworkAddress, TokenNetwork] = {}
    self.database = PFSDatabase(filename=db_filename, pfs_address=self.address)
    self.user_deposit_contract = contracts[CONTRACT_USER_DEPOSIT]
    # Highest block seen so far; updated while polling.
    self.last_known_block = 0
    self.blockchain_state = BlockchainState(
        chain_id=self.chain_id,
        token_network_registry_address=self.registry_address,
        monitor_contract_address=Address(''),  # FIXME
        latest_known_block=self.sync_start_block,
        token_network_addresses=[],
    )
    log.info(
        'Listening to token network registry',
        registry_address=self.registry_address,
        start_block=sync_start_block,
    )
    try:
        self.matrix_listener = MatrixListener(
            private_key=private_key,
            chain_id=self.chain_id,
            callback=self.handle_message,
            service_room_suffix=PATH_FINDING_BROADCASTING_ROOM,
        )
    except ConnectionError as e:
        # The transport is essential — abort startup instead of running
        # without it.
        log.critical('Could not connect to broadcasting system.', exc=e)
        sys.exit(1)
def get_blockchain_state(self) -> BlockchainState:
    """Load the persisted blockchain sync state, including the list of
    known token network addresses, from the database."""
    row = self.conn.execute("SELECT * FROM blockchain").fetchone()
    network_addresses = [
        entry[0]
        for entry in self.conn.execute("SELECT address FROM token_network")
    ]
    return BlockchainState(
        chain_id=row["chain_id"],
        token_network_registry_address=row["token_network_registry_address"],
        monitor_contract_address=row["monitor_contract_address"],
        latest_known_block=row["latest_known_block"],
        token_network_addresses=network_addresses,
    )
def test_get_blockchain_events_returns_early_for_invalid_interval(
    web3: Web3, token_network_registry_contract: Contract
):
    """An empty block range (from_block > to_block) must yield no events."""
    chain_state = BlockchainState(
        chain_id=ChainID(1),
        token_network_registry_address=to_canonical_address(
            token_network_registry_contract.address
        ),
        latest_committed_block=BlockNumber(4),
    )
    events = get_blockchain_events(
        web3=web3,
        token_network_addresses=[],
        chain_state=chain_state,
        from_block=BlockNumber(10),
        to_block=BlockNumber(5),
    )
    assert len(events) == 0
def _process_new_blocks(self, last_block: BlockNumber) -> None:
    """Query all chain events up to ``last_block`` and dispatch each one
    to ``handle_event``."""
    chain_state = BlockchainState(
        latest_known_block=self.database.get_latest_known_block(),
        token_network_addresses=list(self.token_networks),
        token_network_registry_address=self.registry_address,
        monitor_contract_address=Address(""),  # FIXME
        chain_id=self.chain_id,
    )
    _, events = get_blockchain_events(
        web3=self.web3,
        contract_manager=CONTRACT_MANAGER,
        chain_state=chain_state,
        to_block=last_block,
        query_ms=False,
    )
    for event in events:
        self.handle_event(event)
def load_state(self) -> MonitoringServiceState:
    """Load MS state from db or return a new empty state if not saved one is present"""
    row = self.conn.execute("SELECT * FROM blockchain").fetchone()
    network_addresses = [
        entry[0]
        for entry in self.conn.execute("SELECT address FROM token_network")
    ]
    blockchain_state = BlockchainState(
        chain_id=row['chain_id'],
        token_network_registry_address=row['token_network_registry_address'],
        monitor_contract_address=row['monitor_contract_address'],
        latest_known_block=row['latest_known_block'],
        token_network_addresses=network_addresses,
    )
    return MonitoringServiceState(
        blockchain_state=blockchain_state,
        address=row['receiver'],
    )
def test_get_blockchain_events_adaptive_reduces_block_interval_after_timeout(
    web3: Web3, token_network_registry_contract: Contract
):
    """A ReadTimeout during the query must shrink the filter interval to
    one fifth of its previous value."""
    chain_state = BlockchainState(
        chain_id=ChainID(1),
        token_network_registry_address=to_canonical_address(
            token_network_registry_contract.address
        ),
        latest_committed_block=BlockNumber(4),
    )
    assert chain_state.current_event_filter_interval == DEFAULT_FILTER_INTERVAL

    with patch("raiden_libs.blockchain.get_blockchain_events", side_effect=ReadTimeout):
        _ = get_blockchain_events_adaptive(
            web3=web3,
            token_network_addresses=[],
            blockchain_state=chain_state,
            latest_confirmed_block=BlockNumber(1),
        )

    assert chain_state.current_event_filter_interval == DEFAULT_FILTER_INTERVAL // 5
def get_blockchain_events_adaptive(
    web3: Web3,
    blockchain_state: BlockchainState,
    token_network_addresses: List[TokenNetworkAddress],
    latest_confirmed_block: BlockNumber,
) -> Optional[List[Event]]:
    """ Queries new events from the blockchain.

    Uses an adaptive interval, so that the ethereum nodes aren't overloaded.

    Args:
        web3: Web3 object
        blockchain_state: The blockchain state objected. This is mutated and should be reused.
        token_network_addresses: List of known token network addresses. This is mutated
            when a new token network is found. However, additionally a
            `ReceiveTokenNetworkCreatedEvent` is created as well and it is recommended
            to use that instead and to not reuse this list.
        latest_confirmed_block: The latest block to query to

    Returns:
        A list of events if successful, otherwise ``None``
    """
    # increment by one, as `latest_committed_block` has been queried last time already
    from_block = BlockNumber(blockchain_state.latest_committed_block + 1)
    # Never query past the latest confirmed block, and never more than the
    # current adaptive interval in one batch.
    to_block = min(
        latest_confirmed_block,
        # decrement by one, as both limits are inclusive
        BlockNumber(from_block + blockchain_state.current_event_filter_interval - 1),
    )
    try:
        # Time the query so the interval can be adapted to node performance.
        before_query = time.monotonic()
        events = get_blockchain_events(
            web3=web3,
            token_network_addresses=token_network_addresses,
            chain_state=blockchain_state,
            from_block=from_block,
            to_block=to_block,
        )
        after_query = time.monotonic()
        filter_query_duration = after_query - before_query
        # Fast query: double the interval (capped at MAX_FILTER_INTERVAL).
        if filter_query_duration < ETH_GET_LOGS_THRESHOLD_FAST:
            blockchain_state.current_event_filter_interval = BlockTimeout(
                min(MAX_FILTER_INTERVAL, blockchain_state.current_event_filter_interval * 2)
            )
        # Slow query: halve the interval (floored at MIN_FILTER_INTERVAL).
        elif filter_query_duration > ETH_GET_LOGS_THRESHOLD_SLOW:
            blockchain_state.current_event_filter_interval = BlockTimeout(
                max(MIN_FILTER_INTERVAL, blockchain_state.current_event_filter_interval // 2)
            )
        return events
    except ReadTimeout:
        # Timeout: shrink the interval aggressively (to a fifth, floored at
        # MIN_FILTER_INTERVAL) and let the caller retry later.
        old_interval = blockchain_state.current_event_filter_interval
        blockchain_state.current_event_filter_interval = BlockTimeout(
            max(MIN_FILTER_INTERVAL, old_interval // 5)
        )
        log.debug(
            "Failed to query events in time, reducing interval",
            old_interval=old_interval,
            new_interval=blockchain_state.current_event_filter_interval,
        )
        return None