def get_filter_args_for_specific_event_from_channel(
    token_network_address: TokenNetworkAddress,
    channel_identifier: ChannelID,
    event_name: str,
    contract_manager: ContractManager,
    from_block: BlockIdentifier = GENESIS_BLOCK_NUMBER,
    to_block: BlockIdentifier = BLOCK_ID_LATEST,
) -> FilterParams:
    """Build eth_getLogs filter params for one event of a single channel.

    The topics produced by ``construct_event_filter_params`` start with the
    event signature hash followed by the encoded indexed arguments; for a
    token network the first indexed argument is always the channel
    identifier, so filtering on it narrows the query to a single channel.
    """
    token_network_event_abi = contract_manager.get_event_abi(
        CONTRACT_TOKEN_NETWORK, event_name
    )
    checksummed_address = to_checksum_address(token_network_address)
    # Only the filter params are needed here; the data-filter set returned
    # as the first tuple element is discarded.
    filter_params = construct_event_filter_params(
        event_abi=token_network_event_abi,
        abi_codec=ABI_CODEC,
        contract_address=checksummed_address,
        argument_filters={"channel_identifier": channel_identifier},
        fromBlock=from_block,
        toBlock=to_block,
    )[1]
    return filter_params
def _fetch_events_for_all_contracts(
        web3, event, argument_filters: dict, from_block: int, to_block: int) -> Iterable:
    """Get events using the eth_getLogs JSON-RPC API.

    This method is detached from any contract instance.

    This is a stateless method, as opposed to createFilter.
    It can be safely called against nodes which do not provide
    `eth_newFilter` API, like Infura.

    :param web3: Web3 instance used to issue the eth_getLogs call.
    :param event: a web3 contract event class (its raw ABI is read internally).
    :param argument_filters: indexed-argument filters; may also carry an
        ``address`` key to restrict the query to one contract.
    :param from_block: first block to scan (mandatory).
    :param to_block: last block to scan (inclusive).
    :return: list of decoded event entries.
    """
    if from_block is None:
        # BUGFIX: the message used to name a nonexistent "getLoggers" API and
        # the wrong argument name ("fromBlock"); the parameter is from_block.
        raise TypeError(
            "Missing mandatory keyword argument to eth_getLogs: from_block")

    # Currently no way to poke this using a public Web3.py API.
    # This will return raw underlying ABI JSON object for the event
    abi = event._get_event_abi()

    # Depending on the Solidity version used to compile
    # the contract that uses the ABI,
    # it might have Solidity ABI encoding v1 or v2.
    # We just assume the default that you set on Web3 object here.
    # More information here https://eth-abi.readthedocs.io/en/latest/index.html
    codec: ABICodec = web3.codec

    # Here we need to poke a bit into Web3 internals, as this
    # functionality is not exposed by default.
    # Construct JSON-RPC raw filter presentation based on human readable Python descriptions
    # Namely, convert event names to their keccak signatures
    # More information here:
    # https://github.com/ethereum/web3.py/blob/e176ce0793dafdd0573acc8d4b76425b6eb604ca/web3/_utils/filters.py#L71
    data_filter_set, event_filter_params = construct_event_filter_params(
        abi,
        codec,
        address=argument_filters.get("address"),
        argument_filters=argument_filters,
        fromBlock=from_block,
        toBlock=to_block,
    )

    # BUGFIX: log message referred to "eth_getLoggers"; the call below is eth_getLogs.
    logger.debug("Querying eth_getLogs with the following parameters: %s",
                 event_filter_params)

    # Call JSON-RPC API on your Ethereum node.
    # getLogs() returns raw AttributeDict entries
    logs = web3.eth.getLogs(event_filter_params)

    # Convert raw binary data to Python proxy objects as described by ABI
    all_events = []
    for log in logs:
        # Convert raw JSON-RPC log result to human readable event by using ABI data
        # More information how processLog works here
        # https://github.com/ethereum/web3.py/blob/fbaf1ad11b0c7fac09ba34baff2c256cffe0a148/web3/_utils/events.py#L200
        evt = get_event_data(codec, abi, log)
        # Note: This was originally yield,
        # but deferring the timeout exception caused the throttle logic not to work
        all_events.append(evt)
    return all_events
def createFilter(
        self, *,  # PEP 3102
        argument_filters=None,
        fromBlock=None,
        toBlock="latest",
        address=None,
        topics=None):
    """
    Create filter object that tracks logs emitted by this contract event.

    :param argument_filters: mapping of event argument names to the
        value(s) they must match; list values mean "match any of these"
    :param fromBlock: first block to watch (mandatory)
    :param toBlock: last block to watch, defaults to "latest"
    :param address: additional address(es) to restrict the filter to
    :param topics: raw topics list to filter on
    :param filter_params: other parameters to limit the events
    :raises TypeError: if fromBlock is not supplied
    """
    if fromBlock is None:
        raise TypeError("Missing mandatory keyword argument to createFilter: fromBlock")

    if argument_filters is None:
        argument_filters = dict()

    # Copy so the caller's mapping is never mutated.
    _filters = dict(**argument_filters)
    event_abi = self._get_event_abi()
    # Reject filter argument names that collide with JSON-RPC filter keys.
    check_for_forbidden_api_filter_arguments(event_abi, _filters)
    _, event_filter_params = construct_event_filter_params(
        self._get_event_abi(),
        self.web3.codec,
        contract_address=self.address,
        argument_filters=_filters,
        fromBlock=fromBlock,
        toBlock=toBlock,
        address=address,
        topics=topics,
    )
    filter_builder = EventFilterBuilder(event_abi, self.web3.codec)
    filter_builder.address = event_filter_params.get('address')
    filter_builder.fromBlock = event_filter_params.get('fromBlock')
    filter_builder.toBlock = event_filter_params.get('toBlock')
    # Scalar (non-array) args supplied as a list: match any of the values.
    match_any_vals = {
        arg: value
        for arg, value in _filters.items()
        if not is_array_type(filter_builder.args[arg].arg_type)
        and is_list_like(value)
    }
    for arg, value in match_any_vals.items():
        filter_builder.args[arg].match_any(*value)
    # Scalar args supplied as a single value: must match exactly.
    match_single_vals = {
        arg: value
        for arg, value in _filters.items()
        if not is_array_type(filter_builder.args[arg].arg_type)
        and not is_list_like(value)
    }
    for arg, value in match_single_vals.items():
        filter_builder.args[arg].match_single(value)

    log_filter = filter_builder.deploy(self.web3)
    # NOTE(review): this relies on get_event_data being curried in this web3
    # version, so the partial application below yields a log formatter —
    # confirm against the pinned web3 release.
    log_filter.log_entry_formatter = get_event_data(self.web3.codec, self._get_event_abi())
    log_filter.builder = filter_builder

    return log_filter
def createFilter(
        self, *,  # PEP 3102
        argument_filters=None,
        fromBlock=None,
        toBlock="latest",
        address=None,
        topics=None):
    """
    Create filter object that tracks logs emitted by this contract event.

    Codec-less variant: unlike newer web3 releases, neither
    ``construct_event_filter_params`` nor ``EventFilterBuilder`` receives an
    ABI codec here.

    :param argument_filters: mapping of event argument names to the
        value(s) they must match; list values mean "match any of these"
    :param fromBlock: first block to watch (mandatory)
    :param toBlock: last block to watch, defaults to "latest"
    :param address: additional address(es) to restrict the filter to
    :param topics: raw topics list to filter on
    :param filter_params: other parameters to limit the events
    :raises TypeError: if fromBlock is not supplied
    """
    if fromBlock is None:
        raise TypeError("Missing mandatory keyword argument to createFilter: fromBlock")

    if argument_filters is None:
        argument_filters = dict()

    # Copy so the caller's mapping is never mutated.
    _filters = dict(**argument_filters)
    event_abi = self._get_event_abi()
    # Reject filter argument names that collide with JSON-RPC filter keys.
    check_for_forbidden_api_filter_arguments(event_abi, _filters)
    _, event_filter_params = construct_event_filter_params(
        self._get_event_abi(),
        contract_address=self.address,
        argument_filters=_filters,
        fromBlock=fromBlock,
        toBlock=toBlock,
        address=address,
        topics=topics,
    )
    filter_builder = EventFilterBuilder(event_abi)
    filter_builder.address = event_filter_params.get('address')
    filter_builder.fromBlock = event_filter_params.get('fromBlock')
    filter_builder.toBlock = event_filter_params.get('toBlock')
    # Scalar (non-array) args supplied as a list: match any of the values.
    match_any_vals = {
        arg: value
        for arg, value in _filters.items()
        if not is_array_type(filter_builder.args[arg].arg_type)
        and is_list_like(value)
    }
    for arg, value in match_any_vals.items():
        filter_builder.args[arg].match_any(*value)
    # Scalar args supplied as a single value: must match exactly.
    match_single_vals = {
        arg: value
        for arg, value in _filters.items()
        if not is_array_type(filter_builder.args[arg].arg_type)
        and not is_list_like(value)
    }
    for arg, value in match_single_vals.items():
        filter_builder.args[arg].match_single(value)

    log_filter = filter_builder.deploy(self.web3)
    # NOTE(review): this relies on get_event_data being curried in this web3
    # version, so the single-argument call below yields a log formatter —
    # confirm against the pinned web3 release.
    log_filter.log_entry_formatter = get_event_data(self._get_event_abi())
    log_filter.builder = filter_builder

    return log_filter
def __init__(
    self,
    web3: Web3,
    abi: List[Any],
    address: HexAddress,
    event_name: str,
    from_block: int = 0,
    to_block: Union[int, str] = "latest",
    filters: Any = None,
    callback: Optional[Callable[..., Any]] = None,
):
    """Install an eth_newFilter-based listener for one contract event.

    :param web3: connected Web3 instance
    :param abi: full contract ABI; the entry for ``event_name`` is looked up here
    :param address: contract address the filter is restricted to
    :param event_name: name of the event to watch
    :param from_block: first block the filter covers (default 0)
    :param to_block: last block the filter covers (default "latest")
    :param filters: optional argument filters for event parameters
    :param callback: invoked for every matching log entry
    :raises ValueError: if ``event_name`` is not present in ``abi``
    """
    self.web3 = web3
    self.event_name = event_name
    # Callback for every registered log
    self.callback = callback

    filter_kwargs = {
        "fromBlock": from_block,
        "toBlock": to_block,
        "address": address
    }

    event_abi = [
        i for i in abi if i["type"] == "event" and i["name"] == event_name
    ]
    if len(event_abi) == 0:
        raise ValueError(f"Event of name {event_name} not found")

    self.event_abi = event_abi[0]
    assert self.event_abi

    filters = filters if filters else {}

    data_filter_set, filter_params = construct_event_filter_params(
        event_abi=self.event_abi,
        abi_codec=web3.codec,
        argument_filters=filters,
        **filter_kwargs,
    )
    # BUGFIX: bind the single event ABI dict (self.event_abi), not the
    # filtered LIST `event_abi` — get_event_data expects one ABI entry and
    # would fail to decode logs when handed the list.
    log_data_extract_fn = functools.partial(get_event_data, web3.codec,
                                            self.event_abi)
    self.filter = web3.eth.filter(filter_params)
    self.filter.set_data_filters(data_filter_set)
    self.filter.log_entry_formatter = log_data_extract_fn
    self.filter.filter_params = filter_params
def __init__(
    self,
    web3: Web3,
    abi: ABI,
    address: ChecksumAddress,
    event_name: str,
    from_block: BlockNumber = GenesisBlock,
    to_block: BlockIdentifier = "latest",
    filters: Any = None,
    callback: Optional[Callable[..., Any]] = None,
):
    """Install an eth_newFilter-based listener for one contract event.

    :param web3: connected Web3 instance
    :param abi: full contract ABI; the entry for ``event_name`` is looked up here
    :param address: contract address the filter is restricted to
    :param event_name: name of the event to watch
    :param from_block: first block the filter covers (default: genesis)
    :param to_block: last block the filter covers (default "latest")
    :param filters: optional argument filters for event parameters
    :param callback: invoked for every matching log entry
    :raises ValueError: if ``event_name`` is not present in ``abi``
    """
    self.web3 = web3
    self.event_name = event_name
    # Callback for every registered log
    self.callback = callback

    event_abi = [
        i for i in abi if i["type"] == "event" and i["name"] == event_name
    ]
    if len(event_abi) == 0:
        raise ValueError(f"Event of name {event_name} not found")

    self.event_abi = cast(ABIEvent, event_abi[0])
    assert self.event_abi

    filters = filters if filters else {}

    data_filter_set, filter_params = construct_event_filter_params(
        event_abi=self.event_abi,
        abi_codec=web3.codec,
        contract_address=address,
        argument_filters=filters,
        fromBlock=from_block,
        toBlock=to_block,
    )
    # BUGFIX: bind the single event ABI dict (self.event_abi), not the
    # filtered LIST `event_abi` — get_event_data expects one ABI entry and
    # would fail to decode logs when handed the list.
    log_data_extract_fn = functools.partial(get_event_data, web3.codec,
                                            self.event_abi)
    self.filter: Web3LogFilter = web3.eth.filter(
        filter_params)  # type: ignore
    self.filter.set_data_filters(data_filter_set)  # type: ignore
    self.filter.log_entry_formatter = log_data_extract_fn
    self.filter.filter_params = filter_params
def createFilter(
        self, *,  # PEP 3102
        argument_filters=None,
        fromBlock=None,
        toBlock="latest",
        address=None,
        topics=None):
    """
    Create filter object that tracks logs emitted by this contract event.

    :param filter_params: other parameters to limit the events
    """
    if fromBlock is None:
        raise TypeError(
            "Missing mandatory keyword argument to createFilter: fromBlock"
        )

    # Work on a copy so the caller's mapping is never mutated.
    combined_filters = dict(**(argument_filters or {}))

    data_filter_set, event_filter_params = construct_event_filter_params(
        self._get_event_abi(),
        contract_address=self.address,
        argument_filters=combined_filters,
        fromBlock=fromBlock,
        toBlock=toBlock,
        address=address,
        topics=topics,
    )

    # Decode each raw log entry with this event's ABI before handing it out.
    entry_formatter = functools.partial(get_event_data, self._get_event_abi())

    log_filter = self.web3.eth.filter(event_filter_params)
    log_filter.set_data_filters(data_filter_set)
    log_filter.log_entry_formatter = entry_formatter
    log_filter.filter_params = event_filter_params
    return log_filter
def __init__(self, web3, abi, address, event_name, from_block=0,
             to_block='latest', filters=None, callback=None):
    """Install an eth_newFilter-based listener for one contract event.

    :param web3: connected Web3 instance
    :param abi: full contract ABI; the entry for ``event_name`` is looked up here
    :param address: contract address the filter is restricted to
    :param event_name: name of the event to watch
    :param from_block: first block the filter covers (default 0)
    :param to_block: last block the filter covers (default 'latest')
    :param filters: optional argument filters for event parameters
    :param callback: invoked for every matching log entry
    :raises ValueError: if ``event_name`` is not present in ``abi``
    """
    self.web3 = web3
    self.event_name = event_name
    # Callback for every registered log
    self.callback = callback

    filter_kwargs = {
        'fromBlock': from_block,
        'toBlock': to_block,
        'address': address,
    }

    event_abi = [i for i in abi if i['type'] == 'event' and i['name'] == event_name]
    if len(event_abi) == 0:
        # BUGFIX: a bare `return None` from __init__ silently produced a
        # half-initialized object; fail loudly like the sibling listeners do.
        raise ValueError(f'Event of name {event_name} not found')

    self.event_abi = event_abi[0]
    assert self.event_abi

    filters = filters if filters else {}

    data_filter_set, filter_params = construct_event_filter_params(
        self.event_abi,
        argument_filters=filters,
        **filter_kwargs,
    )
    # BUGFIX: bind the single event ABI dict (self.event_abi), not the
    # filtered LIST `event_abi` — get_event_data expects one ABI entry and
    # would fail to decode logs when handed the list.
    log_data_extract_fn = functools.partial(get_event_data, self.event_abi)
    self.filter = web3.eth.filter(filter_params)
    self.filter.set_data_filters(data_filter_set)
    self.filter.log_entry_formatter = log_data_extract_fn
    self.filter.filter_params = filter_params
def _get_logs(
        self,
        web3: Optional[Web3],
        contract_address: ChecksumEthAddress,
        abi: List,
        event_name: str,
        argument_filters: Dict[str, Any],
        from_block: int,
        to_block: Union[int, Literal['latest']] = 'latest',
) -> List[Dict[str, Any]]:
    """Queries logs of an ethereum contract.

    Uses the given web3 node when available; otherwise falls back to
    etherscan, querying in shrinking block windows and de-duplicating
    events that etherscan returns twice across window boundaries.

    May raise:
    - RemoteError if etherscan is used and there is a problem with
    reaching it or with the returned result
    """
    event_abi = find_matching_event_abi(abi=abi, event_name=event_name)
    _, filter_args = construct_event_filter_params(
        event_abi=event_abi,
        abi_codec=Web3().codec,
        contract_address=contract_address,
        argument_filters=argument_filters,
        fromBlock=from_block,
        toBlock=to_block,
    )
    if event_abi['anonymous']:
        # web3.py does not handle the anonymous events correctly and adds the first topic
        filter_args['topics'] = filter_args['topics'][1:]
    events: List[Dict[str, Any]] = []
    start_block = from_block
    if web3 is not None:
        events = _query_web3_get_logs(
            web3=web3,
            filter_args=filter_args,
            from_block=from_block,
            to_block=to_block,
            contract_address=contract_address,
            event_name=event_name,
            argument_filters=argument_filters,
        )
    else:  # etherscan
        until_block = (
            self.etherscan.get_latest_block_number() if to_block == 'latest' else to_block
        )
        blocks_step = 300000
        while start_block <= until_block:
            while True:  # loop to continuously reduce block range if need b
                end_block = min(start_block + blocks_step, until_block)
                try:
                    new_events = self.etherscan.get_logs(
                        contract_address=contract_address,
                        topics=filter_args['topics'],  # type: ignore
                        from_block=start_block,
                        to_block=end_block,
                    )
                except RemoteError as e:
                    if 'Please select a smaller result dataset' in str(e):
                        blocks_step = blocks_step // 2
                        if blocks_step < 100:
                            raise  # stop trying
                        # else try with the smaller step
                        continue

                    # else some other error
                    raise

                break  # we must have a result

            # Turn all Hex ints to ints
            for e_idx, event in enumerate(new_events):
                try:
                    block_number = deserialize_int_from_hex(
                        symbol=event['blockNumber'],
                        location='etherscan log query',
                    )
                    log_index = deserialize_int_from_hex(
                        symbol=event['logIndex'],
                        location='etherscan log query',
                    )
                    # Try to see if the event is a duplicate that got returned
                    # in the previous iteration
                    # NOTE(review): events.pop() removes the LAST accumulated
                    # event regardless of which previous_event matched —
                    # looks intentional only for tail duplicates; confirm.
                    for previous_event in reversed(events):
                        if previous_event['blockNumber'] < block_number:
                            break

                        same_event = (
                            previous_event['logIndex'] == log_index
                            and previous_event['transactionHash'] == event['transactionHash']
                        )
                        if same_event:
                            events.pop()

                    new_events[e_idx]['address'] = deserialize_ethereum_address(
                        event['address'],
                    )
                    new_events[e_idx]['blockNumber'] = block_number
                    new_events[e_idx]['timeStamp'] = deserialize_int_from_hex(
                        symbol=event['timeStamp'],
                        location='etherscan log query',
                    )
                    new_events[e_idx]['gasPrice'] = deserialize_int_from_hex(
                        symbol=event['gasPrice'],
                        location='etherscan log query',
                    )
                    new_events[e_idx]['gasUsed'] = deserialize_int_from_hex(
                        symbol=event['gasUsed'],
                        location='etherscan log query',
                    )
                    new_events[e_idx]['logIndex'] = log_index
                    new_events[e_idx]['transactionIndex'] = deserialize_int_from_hex(
                        symbol=event['transactionIndex'],
                        location='etherscan log query',
                    )
                except DeserializationError as e:
                    # BUGFIX: message was a plain string with a literal
                    # "{str(e)}}" — missing f-prefix and a stray brace.
                    raise RemoteError(
                        f'Couldnt decode an etherscan event due to {str(e)}',
                    ) from e

            # etherscan will only return 1000 events in one go. If more than 1000
            # are returned such as when no filter args are provided then continue
            # the query from the last block
            if len(new_events) == 1000:
                start_block = new_events[-1]['blockNumber']
            else:
                start_block = end_block + 1
            events.extend(new_events)

    return events
def get_logs(
        self,
        contract_address: ChecksumEthAddress,
        abi: List,
        event_name: str,
        argument_filters: Dict[str, Any],
        from_block: int,
        to_block: Union[int, str] = 'latest',
) -> List[Dict[str, Any]]:
    """Queries logs of an ethereum contract

    Queries the connected node in 250k-block windows, or falls back to
    etherscan in 300k-block windows when not connected.

    May raise:
    - RemoteError if etherscan is used and there is a problem with
    reaching it or with the returned result
    """
    event_abi = find_matching_event_abi(abi=abi, event_name=event_name)
    _, filter_args = construct_event_filter_params(
        event_abi=event_abi,
        abi_codec=Web3().codec,
        contract_address=contract_address,
        argument_filters=argument_filters,
        fromBlock=from_block,
        toBlock=to_block,
    )
    if event_abi['anonymous']:
        # web3.py does not handle the anonymous events correctly and adds the first topic
        filter_args['topics'] = filter_args['topics'][1:]
    events: List[Dict[str, Any]] = []
    start_block = from_block
    if self.connected:
        until_block = self.web3.eth.blockNumber if to_block == 'latest' else to_block
        # Chunked query: mutate fromBlock/toBlock in the shared filter_args
        # for each 250k-block window.
        while start_block <= until_block:
            filter_args['fromBlock'] = start_block
            end_block = min(start_block + 250000, until_block)
            filter_args['toBlock'] = end_block
            log.debug(
                'Querying node for contract event',
                contract_address=contract_address,
                event_name=event_name,
                argument_filters=argument_filters,
                from_block=filter_args['fromBlock'],
                to_block=filter_args['toBlock'],
            )
            # WTF: for some reason the first time we get in here the loop resets
            # to the start without querying eth_getLogs and ends up with double logging
            new_events = self.web3.eth.getLogs(filter_args)
            start_block = end_block + 1
            events.extend(new_events)
    else:
        # Etherscan fallback; it tolerates larger 300k-block windows.
        until_block = (
            self.etherscan.get_latest_block_number() if to_block == 'latest' else to_block
        )
        while start_block <= until_block:
            end_block = min(start_block + 300000, until_block)
            new_events = self.etherscan.get_logs(
                contract_address=contract_address,
                topics=filter_args['topics'],
                from_block=start_block,
                to_block=end_block,
            )
            start_block = end_block + 1
            events.extend(new_events)

    return events
def _get_logs(
        self,
        web3: Optional[Web3],
        contract_address: ChecksumEthAddress,
        abi: List,
        event_name: str,
        argument_filters: Dict[str, Any],
        from_block: int,
        to_block: Union[int, Literal['latest']] = 'latest',
) -> List[Dict[str, Any]]:
    """Queries logs of an ethereum contract.

    Uses the given web3 node in 250k-block windows when available,
    otherwise falls back to etherscan in 300k-block windows. Hex fields in
    the results are normalized (hex strings for web3, ints for etherscan).

    May raise:
    - RemoteError if etherscan is used and there is a problem with
    reaching it or with the returned result
    """
    event_abi = find_matching_event_abi(abi=abi, event_name=event_name)
    _, filter_args = construct_event_filter_params(
        event_abi=event_abi,
        abi_codec=Web3().codec,
        contract_address=contract_address,
        argument_filters=argument_filters,
        fromBlock=from_block,
        toBlock=to_block,
    )
    if event_abi['anonymous']:
        # web3.py does not handle the anonymous events correctly and adds the first topic
        filter_args['topics'] = filter_args['topics'][1:]
    events: List[Dict[str, Any]] = []
    start_block = from_block
    if web3 is not None:
        until_block = web3.eth.blockNumber if to_block == 'latest' else to_block
        while start_block <= until_block:
            filter_args['fromBlock'] = start_block
            end_block = min(start_block + 250000, until_block)
            filter_args['toBlock'] = end_block
            log.debug(
                'Querying node for contract event',
                contract_address=contract_address,
                event_name=event_name,
                argument_filters=argument_filters,
                from_block=filter_args['fromBlock'],
                to_block=filter_args['toBlock'],
            )
            # WTF: for some reason the first time we get in here the loop resets
            # to the start without querying eth_getLogs and ends up with double logging
            new_events_web3 = cast(List[Dict[str, Any]], web3.eth.getLogs(filter_args))
            # Turn all HexBytes into hex strings
            for e_idx, event in enumerate(new_events_web3):
                new_events_web3[e_idx]['blockHash'] = event['blockHash'].hex()
                new_topics = []
                for topic in (event['topics']):
                    new_topics.append(topic.hex())
                new_events_web3[e_idx]['topics'] = new_topics
                new_events_web3[e_idx]['transactionHash'] = event['transactionHash'].hex()

            start_block = end_block + 1
            events.extend(new_events_web3)
    else:  # etherscan
        until_block = (
            self.etherscan.get_latest_block_number() if to_block == 'latest' else to_block
        )
        while start_block <= until_block:
            end_block = min(start_block + 300000, until_block)
            new_events = self.etherscan.get_logs(
                contract_address=contract_address,
                topics=filter_args['topics'],  # type: ignore
                from_block=start_block,
                to_block=end_block,
            )
            # Turn all Hex ints to ints
            for e_idx, event in enumerate(new_events):
                try:
                    new_events[e_idx]['address'] = to_checksum_address(
                        event['address'])
                    new_events[e_idx]['blockNumber'] = deserialize_int_from_hex(
                        symbol=event['blockNumber'],
                        location='etherscan log query',
                    )
                    new_events[e_idx]['timeStamp'] = deserialize_int_from_hex(
                        symbol=event['timeStamp'],
                        location='etherscan log query',
                    )
                    new_events[e_idx]['gasPrice'] = deserialize_int_from_hex(
                        symbol=event['gasPrice'],
                        location='etherscan log query',
                    )
                    new_events[e_idx]['gasUsed'] = deserialize_int_from_hex(
                        symbol=event['gasUsed'],
                        location='etherscan log query',
                    )
                    new_events[e_idx]['logIndex'] = deserialize_int_from_hex(
                        symbol=event['logIndex'],
                        location='etherscan log query',
                    )
                    new_events[e_idx]['transactionIndex'] = deserialize_int_from_hex(
                        symbol=event['transactionIndex'],
                        location='etherscan log query',
                    )
                except DeserializationError as e:
                    # BUGFIX: message was a plain string with a literal
                    # "{str(e)}}" — missing f-prefix and a stray brace.
                    raise RemoteError(
                        f'Couldnt decode an etherscan event due to {str(e)}',
                    ) from e

            start_block = end_block + 1
            events.extend(new_events)

    return events
def getLogs(self,
            argument_filters=None,
            fromBlock=None,
            toBlock=None,
            blockHash=None):
    """Get events for this contract instance using eth_getLogs API.

    This is a stateless method, as opposed to createFilter.
    It can be safely called against nodes which do not provide
    eth_newFilter API, like Infura nodes.

    If there are many events,
    like ``Transfer`` events for a popular token,
    the Ethereum node might be overloaded and timeout
    on the underlying JSON-RPC call.

    Example - how to get all ERC-20 token transactions
    for the latest 10 blocks:

    .. code-block:: python

        from = max(mycontract.web3.eth.blockNumber - 10, 1)
        to = mycontract.web3.eth.blockNumber

        events = mycontract.events.Transfer.getLogs(fromBlock=from, toBlock=to)

        for e in events:
            print(e["args"]["from"],
                e["args"]["to"],
                e["args"]["value"])

    The returned processed log values will look like:

    .. code-block:: python

        (
            AttributeDict({
             'args': AttributeDict({}),
             'event': 'LogNoArguments',
             'logIndex': 0,
             'transactionIndex': 0,
             'transactionHash': HexBytes('...'),
             'address': '0xF2E246BB76DF876Cef8b38ae84130F4F55De395b',
             'blockHash': HexBytes('...'),
             'blockNumber': 3
            }),
            AttributeDict(...),
            ...
        )

    See also: :func:`web3.middleware.filter.local_filter_middleware`.

    :param argument_filters:
    :param fromBlock: block number or "latest", defaults to "latest"
    :param toBlock: block number or "latest". Defaults to "latest"
    :param blockHash: block hash. blockHash cannot be set at the
      same time as fromBlock or toBlock
    :yield: Tuple of :class:`AttributeDict` instances
    """
    if not self.address:
        raise TypeError("This method can be only called on "
                        "an instated contract with an address")

    abi = self._get_event_abi()

    if argument_filters is None:
        argument_filters = dict()

    # Copy so the caller's mapping is never mutated.
    _filters = dict(**argument_filters)

    # blockHash and a block range are mutually exclusive in eth_getLogs.
    blkhash_set = blockHash is not None
    blknum_set = fromBlock is not None or toBlock is not None
    if blkhash_set and blknum_set:
        raise ValidationError('blockHash cannot be set at the same'
                              ' time as fromBlock or toBlock')

    # Construct JSON-RPC raw filter presentation based on human readable Python descriptions
    # Namely, convert event names to their keccak signatures
    # NOTE(review): self.address is passed as both contract_address and
    # address — upstream web3 merges the two, so this likely yields a
    # duplicated address list; confirm it is intentional.
    data_filter_set, event_filter_params = construct_event_filter_params(
        abi,
        contract_address=self.address,
        argument_filters=_filters,
        fromBlock=fromBlock,
        toBlock=toBlock,
        address=self.address,
    )

    if blockHash is not None:
        event_filter_params['blockHash'] = blockHash

    # Call JSON-RPC API
    logs = self.web3.eth.getLogs(event_filter_params)

    # Convert raw binary data to Python proxy objects as described by ABI
    return tuple(get_event_data(abi, entry) for entry in logs)
def test_construct_event_filter_params(web3, event_abi, fn_kwargs, expected):
    """The filter params (second return value) must equal ``expected``."""
    filter_params = construct_event_filter_params(event_abi, web3.codec, **fn_kwargs)[1]
    assert filter_params == expected
def test_construct_event_filter_params_for_data_filters(
        event_abi, web3, fn_kwargs, expected):
    """The data-filter set (first return value) must equal ``expected``."""
    data_filters = construct_event_filter_params(event_abi, web3.codec, **fn_kwargs)[0]
    assert data_filters == expected