def construct_event_topic_set(event_abi, arguments=None):
    """Build the eth_getLogs topic list for ``event_abi``.

    ``arguments`` maps indexed-argument names to a value or list of
    candidate values; a positional list/tuple is also accepted and must
    then cover every event input.  Returns a normalized topic list whose
    first entry is the event signature topic.
    """
    if arguments is None:
        arguments = {}
    if isinstance(arguments, (list, tuple)):
        # Positional form: require exactly one value per constructor input.
        if len(arguments) != len(event_abi['inputs']):
            raise ValueError(
                "When passing an argument list, the number of arguments must "
                "match the event constructor."
            )
        arguments = {
            arg['name']: [arg_value]
            for arg, arg_value in zip(event_abi['inputs'], arguments)
        }

    # Ensure every argument maps to a *list* of candidate values.
    normalized_args = {
        key: value if is_list_like(value) else [value]
        for key, value in arguments.items()
    }

    event_topic = encode_hex(event_abi_to_log_topic(event_abi))
    indexed_args = get_indexed_event_inputs(event_abi)
    # Pair each indexed input with its candidates; [None] means "match any".
    zipped_abi_and_args = [
        (arg, normalized_args.get(arg['name'], [None]))
        for arg in indexed_args
    ]
    # None stays None (wildcard topic); everything else is ABI-encoded.
    encoded_args = [
        [
            None if option is None else encode_hex(encode_single(arg['type'], option))
            for option in arg_options]
        for arg, arg_options in zipped_abi_and_args
    ]

    topics = list(normalize_topic_list([event_topic] + encoded_args))
    return topics
def all_events_filter(
        self,
        from_block: typing.BlockSpecification = None,
        to_block: typing.BlockSpecification = None,
) -> typing.Tuple[Filter, Filter]:
    """Install two filters that together cover every event of this channel.

    Returns the pair ``(channel_filter, unlock_filter)``; see the inline
    comments for which events each filter matches.
    """
    channel_topics = [
        None,  # event topic is any
        encode_hex(encode_single('bytes32', self.channel_identifier)),  # channel_id
    ]

    # This will match the events:
    # ChannelOpened, ChannelNewDeposit, ChannelWithdraw, ChannelClosed,
    # NonClosingBalanceProofUpdated, ChannelSettled
    channel_filter = self.token_network.client.new_filter(
        contract_address=self.token_network.address,
        topics=channel_topics,
        from_block=from_block,
        to_block=to_block,
    )

    # This will match the events:
    # ChannelUnlocked
    #
    # These topics must not be joined with the channel_filter, otherwise
    # the filter ChannelSettled wont match (observed with geth
    # 1.8.11-stable-dea1ce05)
    event_unlock_abi = CONTRACT_MANAGER.get_event_abi(
        CONTRACT_TOKEN_NETWORK,
        EVENT_CHANNEL_UNLOCKED,
    )
    event_unlock_topic = encode_hex(event_abi_to_log_topic(event_unlock_abi))
    # Addresses are left-padded to fill a full 32-byte topic word.
    participant1_topic = encode_hex(self.participant1.rjust(32, b'\0'))
    participant2_topic = encode_hex(self.participant2.rjust(32, b'\0'))
    unlock_topics = [
        event_unlock_topic,
        [participant1_topic, participant2_topic],  # event participant1 is us or them
        [participant2_topic, participant1_topic],  # event participant2 is us or them
    ]
    unlock_filter = self.token_network.client.new_filter(
        contract_address=self.token_network.address,
        topics=unlock_topics,
        from_block=from_block,
        to_block=to_block,
    )

    return channel_filter, unlock_filter
def secret_registered_filter(
        self,
        from_block: BlockSpecification = GENESIS_BLOCK_NUMBER,
        to_block: BlockSpecification = 'latest',
) -> StatelessFilter:
    """Create a filter matching SecretRevealed events on the secret registry."""
    abi = self.contract_manager.get_event_abi(
        CONTRACT_SECRET_REGISTRY,
        EVENT_SECRET_REVEALED,
    )
    signature_topic = encode_hex(event_abi_to_log_topic(abi))
    return self.client.new_filter(
        self.address,
        topics=[signature_topic],
        from_block=from_block,
        to_block=to_block,
    )
def __init__(self, strategy, vault, watch_events_forever):
    """Wrap a strategy contract and prepare a background event watcher."""
    self.strategy = contract(strategy)
    self.vault = vault
    try:
        self.name = self.strategy.name()
    except ValueError:
        # Contract exposes no name(); fall back to a shortened address.
        self.name = strategy[:10]
    self._views = safe_views(self.strategy.abi)
    self._harvests = []
    interesting_events = [
        entry
        for entry in self.strategy.abi
        if entry["type"] == "event" and entry["name"] in STRATEGY_EVENTS
    ]
    self._topics = [[encode_hex(event_abi_to_log_topic(entry)) for entry in interesting_events]]
    self._watch_events_forever = watch_events_forever
    self._done = threading.Event()
    self._thread = threading.Thread(target=self.watch_events, daemon=True)
def secret_registered_filter(
        self,
        from_block: typing.BlockSpecification = 0,
        to_block: typing.BlockSpecification = 'latest',
) -> Filter:
    """Create a filter matching SecretRevealed events on the secret registry."""
    abi = CONTRACT_MANAGER.get_event_abi(
        CONTRACT_SECRET_REGISTRY,
        EVENT_SECRET_REVEALED,
    )
    signature_topic = encode_hex(event_abi_to_log_topic(abi))
    return self.client.new_filter(
        self.address,
        topics=[signature_topic],
        from_block=from_block,
        to_block=to_block,
    )
def secret_registered_filter(
        self,
        from_block: typing.BlockSpecification = 0,
        to_block: typing.BlockSpecification = 'latest',
) -> StatelessFilter:
    """Create a filter matching SecretRevealed events on the secret registry."""
    revealed_abi = CONTRACT_MANAGER.get_event_abi(
        CONTRACT_SECRET_REGISTRY,
        EVENT_SECRET_REVEALED,
    )
    return self.client.new_filter(
        self.address,
        topics=[encode_hex(event_abi_to_log_topic(revealed_abi))],
        from_block=from_block,
        to_block=to_block,
    )
def tokenadded_filter(
        self,
        from_block: typing.BlockSpecification = 0,
        to_block: typing.BlockSpecification = 'latest',
) -> Filter:
    """Create a filter matching TokenNetworkCreated events on the registry."""
    created_abi = CONTRACT_MANAGER.get_event_abi(
        CONTRACT_TOKEN_NETWORK_REGISTRY,
        EVENT_TOKEN_NETWORK_CREATED,
    )
    signature_topic = encode_hex(event_abi_to_log_topic(created_abi))
    return self.client.new_filter(
        self.proxy.contract_address,
        topics=[signature_topic],
        from_block=from_block,
        to_block=to_block,
    )
def tokenadded_filter(
        self,
        from_block: BlockSpecification = GENESIS_BLOCK_NUMBER,
        to_block: BlockSpecification = 'latest',
) -> StatelessFilter:
    """Create a filter matching TokenNetworkCreated events on the registry."""
    created_abi = self.contract_manager.get_event_abi(
        CONTRACT_TOKEN_NETWORK_REGISTRY,
        EVENT_TOKEN_NETWORK_CREATED,
    )
    return self.client.new_filter(
        self.proxy.contract_address,
        topics=[encode_hex(event_abi_to_log_topic(created_abi))],
        from_block=from_block,
        to_block=to_block,
    )
def channelnew_filter(
        self,
        from_block: typing.BlockSpecification = 0,
        to_block: typing.BlockSpecification = 'latest',
) -> Filter:
    """ Install a new filter for ChannelNew events.

    Args:
        from_block: Create filter starting from this block number (default: 0).
        to_block: Create filter stopping at this block number (default: 'latest').

    Return:
        The filter instance.
    """
    opened_abi = CONTRACT_MANAGER.get_event_abi(CONTRACT_TOKEN_NETWORK, ChannelEvent.OPENED)
    topics = [encode_hex(event_abi_to_log_topic(opened_abi))]
    return self.events_filter(topics, from_block, to_block)
def tokenadded_filter(self, from_block: Optional[BlockNumber] = None
                      ) -> StatelessFilter:
    """Create a filter matching TokenNetworkCreated events on the registry."""
    created_abi = find_matching_event_abi(
        abi=self.metadata.abi,
        event_name=EVENT_TOKEN_NETWORK_CREATED)
    topics: List[Optional[str]] = [
        encode_hex(event_abi_to_log_topic(created_abi))
    ]
    if from_block is None:
        # There are no logs before the contract was deployed.
        from_block = self.metadata.filters_start_at
    return self.rpc_client.new_filter(
        contract_address=self.proxy.contract_address,
        topics=topics,
        from_block=from_block)
def channelnew_filter(
        self,
        from_block: typing.BlockSpecification = 0,
        to_block: typing.BlockSpecification = 'latest',
) -> Filter:
    """ Install a new filter for ChannelNew events.

    Args:
        from_block: Create filter starting from this block number (default: 0).
        to_block: Create filter stopping at this block number (default: 'latest').

    Return:
        The filter instance.
    """
    opened_abi = CONTRACT_MANAGER.get_event_abi(CONTRACT_TOKEN_NETWORK, EVENT_CHANNEL_OPENED)
    topics = [encode_hex(event_abi_to_log_topic(opened_abi))]
    return self.events_filter(topics, from_block, to_block)
def decode_event(abi: ABI, event_log: LogReceipt) -> EventData:
    """Helper function to unpack event data using a provided ABI

    Args:
        abi: The ABI of the contract, not the ABI of the event
        event_log: The raw event data

    Returns:
        The decoded event
    """
    lookup = {}
    for candidate in filter_by_type("event", abi):
        lookup[event_abi_to_log_topic(candidate)] = candidate  # type: ignore
    matching_abi = lookup[event_log["topics"][0]]
    return get_event_data(ABI_CODEC, matching_abi, event_log)
def decode_event(abi: ABI, log_: Dict) -> Dict:
    """Helper function to unpack event data using a provided ABI

    Args:
        abi: The ABI of the contract, not the ABI of the event
        log_: The raw event data

    Returns:
        The decoded event
    """
    topic0 = log_["topics"][0]
    if isinstance(topic0, str):
        log_["topics"][0] = decode_hex(topic0)
    elif isinstance(topic0, int):
        # BUGFIX: the previous `decode_hex(hex(topic0))` fails on odd-length
        # hex strings (e.g. hex(1) == '0x1') and drops leading zero bytes; an
        # event topic is always a full 32-byte word.
        log_["topics"][0] = topic0.to_bytes(32, "big")
    event_id = log_["topics"][0]
    events = filter_by_type("event", abi)
    topic_to_event_abi = {event_abi_to_log_topic(event_abi): event_abi for event_abi in events}
    event_abi = topic_to_event_abi[event_id]
    return get_event_data(event_abi, log_)
def find_deposits( web3: Web3, service_address: Address, service_registry_contract: Contract, start_block: BlockNumber, ) -> List[Dict[str, Any]]: """ Return the address of the oldest deposit contract which is not withdrawn """ # Get RegisteredService events for service_address event_abi = dict( service_registry_contract.events[EVENT_REGISTERED_SERVICE]().abi) topics = [ event_abi_to_log_topic(event_abi), bytes([0] * 12) + service_address, ] filter_params = FilterParams({ "fromBlock": start_block, "toBlock": "latest", "address": service_registry_contract.address, "topics": [HexStr("0x" + t.hex()) for t in topics], }) raw_events = web3.eth.getLogs(filter_params) events = [ decode_event(service_registry_contract.abi, event) for event in raw_events ] # Bring events into a pleasant form return [ dict( block_number=e["blockNumber"], valid_till=datetime.utcfromtimestamp( e["args"]["valid_till"]).isoformat(" "), amount=e["args"]["deposit_amount"], deposit_contract=e["args"]["deposit_contract"], withdrawn=not web3.eth.getCode(e["args"]["deposit_contract"]), ) for e in events ]
def construct_event_topic_set(
    event_abi: ABIEvent,
    abi_codec: ABICodec,
    arguments: Optional[Union[Sequence[Any], Dict[str, Any]]] = None
) -> List[HexStr]:
    """Build the eth_getLogs topic set for ``event_abi``.

    ``arguments`` maps indexed-argument names to a value or list of
    candidate values; a positional sequence covering every event input is
    also accepted.  Returns a normalized topic list whose first entry is
    the event signature topic.
    """
    if arguments is None:
        arguments = {}
    if isinstance(arguments, (list, tuple)):
        # Positional form: require exactly one value per constructor input.
        if len(arguments) != len(event_abi['inputs']):
            raise ValueError(
                "When passing an argument list, the number of arguments must "
                "match the event constructor."
            )
        arguments = {
            arg['name']: [arg_value]
            for arg, arg_value
            in zip(event_abi['inputs'], arguments)
        }

    # Ensure every argument maps to a *list* of candidate values.
    normalized_args = {
        key: value if is_list_like(value) else [value]
        # type ignored b/c arguments is always a dict at this point
        for key, value in arguments.items()  # type: ignore
    }

    # typed dict cannot be used w/ a normal Dict
    # https://github.com/python/mypy/issues/4976
    event_topic = encode_hex(event_abi_to_log_topic(event_abi))  # type: ignore
    indexed_args = get_indexed_event_inputs(event_abi)
    # Pair each indexed input with its candidates; [None] means "match any".
    zipped_abi_and_args = [
        (arg, normalized_args.get(arg['name'], [None]))
        for arg in indexed_args
    ]
    # None stays None (wildcard topic); everything else is ABI-encoded.
    encoded_args = [
        [
            None if option is None else encode_hex(abi_codec.encode_single(arg['type'], option))
            for option in arg_options]
        for arg, arg_options in zipped_abi_and_args
    ]

    topics = list(normalize_topic_list([event_topic] + encoded_args))  # type: ignore
    return topics
def __init__(self, vault, api_version=None, token=None, registry=None):
    """Wrap a vault contract and prepare a background strategy-event watcher."""
    self._strategies = {}
    self._revoked = {}
    self.vault = vault
    self.api_version = api_version
    if token is None:
        token = vault.token()
    self.token = Contract(token)
    self.registry = registry
    self.scale = 10 ** self.vault.decimals()
    # multicall-safe views with 0 inputs and numeric output.
    self._views = safe_views(self.vault.abi)
    # load strategies from events and watch for freshly attached strategies
    strategy_abis = [
        entry
        for entry in self.vault.abi
        if entry["type"] == "event" and entry["name"] in STRATEGY_EVENTS
    ]
    self._topics = [[encode_hex(event_abi_to_log_topic(entry)) for entry in strategy_abis]]
    self._done = threading.Event()
    self._thread = threading.Thread(target=self.watch_events, daemon=True)
def get_event_logs(web3: Any, instance: Any, event_name: str, start_block: int, end_block: int, batch_size: Optional[int]) -> Iterator[Any]: """ Query the attached node for all events emitted by the given contract instance, with the given name. Yields an iterator of event-specific objects to be decoded by the caller. """ # It is possible to achieve this via the contract interface, with code of # the form: # # event = instance.events[event_name] # filter = event.createFilter(fromBlock=start_block, toBlock=to_block) # logs = web3.eth.getFilterLogs(filter) # # However, this creates filters on the host node, which may not be # permitted in all configurations. Hence, the code here iterates manually, # skpping events with other topics, from the same contract. contract_address = instance.address contract_event = instance.events[event_name]() event_abi = find_matching_event_abi(instance.abi, event_name=event_name) log_topic = event_abi_to_log_topic(cast(Dict[str, Any], event_abi)) batch_size = batch_size or SYNC_BLOCKS_PER_BATCH while start_block <= end_block: # Filters are *inclusive* wrt "toBlock", hence the -1 here, and +1 to # set start_block before iterating. to_block = min(start_block + batch_size - 1, end_block) filter_params = { 'fromBlock': start_block, 'toBlock': to_block, 'address': contract_address, } logs = web3.eth.getLogs(filter_params) for log in logs: if log_topic == log['topics'][0]: yield contract_event.processLog(log) start_block = to_block + 1
def construct_event_topic_set(event_abi, arguments=None):
    """Build eth_getLogs topic sets for ``event_abi``.

    ``arguments`` maps indexed-argument names to a value or list of
    candidate values; a positional list/tuple covering every event input
    is also accepted.  Returns one topic list per combination of candidate
    values (cartesian product).
    """
    if arguments is None:
        arguments = {}
    if isinstance(arguments, (list, tuple)):
        # Positional form: require exactly one value per constructor input.
        if len(arguments) != len(event_abi['inputs']):
            raise ValueError(
                "When passing an argument list, the number of arguments must "
                "match the event constructor."
            )
        arguments = {
            arg['name']: [arg_value]
            for arg, arg_value in zip(event_abi['inputs'], arguments)
        }

    # Ensure every argument maps to a *list* of candidate values.
    normalized_args = {
        key: value if is_list_like(value) else [value]
        for key, value in arguments.items()
    }

    event_topic = encode_hex(event_abi_to_log_topic(event_abi))
    indexed_args = get_indexed_event_inputs(event_abi)
    # Pair each indexed input with its candidates; [None] means "match any".
    zipped_abi_and_args = [
        (arg, normalized_args.get(arg['name'], [None]))
        for arg in indexed_args
    ]
    # None stays None (wildcard topic); everything else is ABI-encoded.
    encoded_args = [
        [
            None if option is None else encode_hex(encode_single(arg['type'], option))
            for option in arg_options]
        for arg, arg_options in zipped_abi_and_args
    ]

    # One topic list per combination of argument candidates; an
    # all-wildcard combination collapses to just the signature topic.
    topics = [
        [event_topic] + list(permutation)
        if any(value is not None for value in permutation)
        else [event_topic]
        for permutation in itertools.product(*encoded_args)
    ]
    return topics
def decode_event(abi_codec: ABICodec, abi: ABI, log_: LogReceipt) -> Dict:
    """Helper function to unpack event data using a provided ABI

    Args:
        abi_codec: The ABI codec
        abi: The ABI of the contract, not the ABI of the event
        log_: The raw event data

    Returns:
        The decoded event
    """
    topic_map = {
        event_abi_to_log_topic(candidate): candidate  # type: ignore
        for candidate in filter_by_type("event", abi)
    }
    matching_abi = topic_map[log_["topics"][0]]
    decoded = get_event_data(abi_codec=abi_codec, event_abi=matching_abi, log_entry=log_)
    return cast(Dict[Any, Any], decoded)
def __init__(self, contractInfo):
    """Wrap a deployed-contract description and index its functions/events.

    ``contractInfo`` is read for "address", "abi", "name", "code" and
    optionally "storage" — assumed schema; TODO confirm against callers.
    """
    self.w3 = web3.Web3()
    self.interface = self.w3.eth.contract(address=contractInfo["address"],
                                          abi=contractInfo["abi"])
    self.address_string = contractInfo["address"]
    self.address = eth_utils.to_int(hexstr=self.address_string)
    self.abi = contractInfo["abi"]
    self.name = contractInfo["name"]
    self.funcs = {}
    self.functions = []
    # Strip the "0x" prefix before hex-decoding the contract code.
    self.code = bytes.fromhex(contractInfo["code"][2:])
    self.storage = {}
    if "storage" in contractInfo:
        raw_storage = contractInfo["storage"]
        for item in raw_storage:
            key = eth_utils.to_int(hexstr=item)
            self.storage[key] = eth_utils.to_int(hexstr=raw_storage[item])
    # NOTE(review): self.address was already assigned the same value above;
    # this re-assignment is redundant.
    self.address = eth_utils.to_int(hexstr=contractInfo["address"])
    for func_interface in self.interface.abi:
        if func_interface["type"] == "function":
            # Functions are keyed by their 4-byte selector (as int).
            id_bytes = eth_utils.function_abi_to_4byte_selector(
                func_interface)
            func_id = eth_utils.big_endian_to_int(id_bytes)
            self.funcs[func_id] = func_interface
        elif func_interface["type"] == "event":
            # Events are keyed by their 32-byte log topic (as int).
            id_bytes = eth_utils.event_abi_to_log_topic(func_interface)
            func_id = eth_utils.big_endian_to_int(id_bytes)
            self.funcs[func_id] = func_interface
    funcs = [
        x for x in dir(self.interface.functions)
        if x[0] != '_' and x != "abi"
    ]
    for func in funcs:
        # NOTE(review): attributes are set on the ArbContract *class*, not
        # the instance, so constructing a second instance overwrites the
        # bindings of the first — confirm this is intentional.
        setattr(
            ArbContract, func,
            generate_func(func, self.interface, contractInfo["address"]))
        setattr(ArbContract, "_" + func, generate_func2(func, self.interface))
        self.functions.append(func)
def decode_event(abi: Dict, log: Dict):
    """ Helper function to unpack event data using a provided ABI

    Args:
        abi: The ABI of the contract, not the ABI of the event
        log: The raw event data

    Returns:
        The decoded event
    """
    topic0 = log['topics'][0]
    if isinstance(topic0, str):
        log['topics'][0] = decode_hex(topic0)
    elif isinstance(topic0, int):
        # BUGFIX: the previous `decode_hex(hex(topic0))` fails on odd-length
        # hex strings (e.g. hex(1) == '0x1') and drops leading zero bytes; an
        # event topic is always a full 32-byte word.
        log['topics'][0] = topic0.to_bytes(32, 'big')
    event_id = log['topics'][0]
    events = filter_by_type('event', abi)
    topic_to_event_abi = {
        event_abi_to_log_topic(event_abi): event_abi
        for event_abi in events
    }
    event_abi = topic_to_event_abi[event_id]
    return get_event_data(event_abi, log)
def parse_abi(self):
    """Index the contract ABI: functions by 4-byte selector and by name,
    events by their log topic (all keys hex-encoded)."""
    for func in filter_by_type("function", self.contract_abi):
        selector = function_signature_to_4byte_selector(abi_to_signature(func))
        self.func_abi_map_by_selector[encode_hex(selector)] = func
        self.func_abi_map_by_name[func['name']] = func
    for event in filter_by_type("event", self.contract_abi):
        self.event_abi_map[encode_hex(event_abi_to_log_topic(event))] = event
def get_netting_channel_deposit_events(
        chain: BlockChainService,
        token_network_address: Address,
        netting_channel_identifier: ChannelID,
        events: List[str] = ALL_EVENTS,
        from_block: BlockSpecification = 0,
        to_block: BlockSpecification = 'latest',
) -> List[Dict]:
    """Fetch channel deposit events emitted by the given token network."""
    deposit_abi = CONTRACT_MANAGER.get_event_abi(
        CONTRACT_TOKEN_NETWORK,
        ChannelEvent.DEPOSIT,
    )
    topics = [encode_hex(event_abi_to_log_topic(deposit_abi))]
    return get_contract_events(
        chain,
        CONTRACT_MANAGER.get_contract_abi(CONTRACT_TOKEN_NETWORK),
        token_network_address,
        topics,
        from_block,
        to_block,
    )
def get_netting_channel_settled_events(
        chain: BlockChainService,
        token_network_address: Address,
        netting_channel_identifier: ChannelID,
        events: List[str] = ALL_EVENTS,
        from_block: BlockSpecification = 0,
        to_block: BlockSpecification = 'latest',
) -> List[Dict]:
    """Fetch channel settled events emitted by the given token network."""
    settled_abi = CONTRACT_MANAGER.get_event_abi(
        CONTRACT_TOKEN_NETWORK,
        EVENT_CHANNEL_SETTLED,
    )
    topics = [encode_hex(event_abi_to_log_topic(settled_abi))]
    return get_contract_events(
        chain,
        CONTRACT_MANAGER.get_contract_abi(CONTRACT_TOKEN_NETWORK),
        token_network_address,
        topics,
        from_block,
        to_block,
    )
def initialize_event_topics(event_abi: ABIEvent) -> Union[bytes, List[Any]]:
    """Return the log topic for a non-anonymous event, else an empty list."""
    if event_abi['anonymous']:
        return []
    # https://github.com/python/mypy/issues/4976
    return event_abi_to_log_topic(event_abi)  # type: ignore
def abi_to_event_topic(abi):
    """Return the 0x-prefixed hex log topic for an event ABI entry."""
    topic_bytes = event_abi_to_log_topic(abi)
    return '0x{}'.format(topic_bytes.hex())
def initialize_event_topics(event_abi):
    """Return the log topic for a non-anonymous event, else an empty list."""
    if event_abi['anonymous']:
        return []
    return event_abi_to_log_topic(event_abi)
def get_event_data(abi_codec: ABICodec, event_abi: ABIEvent, log_entry: LogReceipt) -> EventData:
    """
    Given an event ABI and a log entry for that event, return the decoded
    event data
    """
    if event_abi['anonymous']:
        # Anonymous events carry no signature topic.
        log_topics = log_entry['topics']
    elif not log_entry['topics']:
        raise MismatchedABI("Expected non-anonymous event to have 1 or more topics")
    # type ignored b/c event_abi_to_log_topic(event_abi: Dict[str, Any])
    elif event_abi_to_log_topic(event_abi) != log_entry['topics'][0]:  # type: ignore
        raise MismatchedABI("The event signature did not match the provided ABI")
    else:
        # Drop the signature topic; the remainder are the indexed arguments.
        log_topics = log_entry['topics'][1:]

    # Indexed inputs are decoded from the topics ...
    log_topics_abi = get_indexed_event_inputs(event_abi)
    log_topic_normalized_inputs = normalize_event_input_types(log_topics_abi)
    log_topic_types = get_event_abi_types_for_decoding(log_topic_normalized_inputs)
    log_topic_names = get_abi_input_names(ABIEvent({'inputs': log_topics_abi}))

    if len(log_topics) != len(log_topic_types):
        raise LogTopicError("Expected {0} log topics.  Got {1}".format(
            len(log_topic_types),
            len(log_topics),
        ))

    # ... while non-indexed inputs are ABI-decoded from the data blob.
    log_data = hexstr_if_str(to_bytes, log_entry['data'])
    log_data_abi = exclude_indexed_event_inputs(event_abi)
    log_data_normalized_inputs = normalize_event_input_types(log_data_abi)
    log_data_types = get_event_abi_types_for_decoding(log_data_normalized_inputs)
    log_data_names = get_abi_input_names(ABIEvent({'inputs': log_data_abi}))

    # sanity check that there are not name intersections between the topic
    # names and the data argument names.
    duplicate_names = set(log_topic_names).intersection(log_data_names)
    if duplicate_names:
        raise InvalidEventABI(
            "The following argument names are duplicated "
            f"between event inputs: '{', '.join(duplicate_names)}'"
        )

    decoded_log_data = abi_codec.decode_abi(log_data_types, log_data)
    normalized_log_data = map_abi_data(
        BASE_RETURN_NORMALIZERS,
        log_data_types,
        decoded_log_data
    )

    # Each indexed argument occupies exactly one topic word.
    decoded_topic_data = [
        abi_codec.decode_single(topic_type, topic_data)
        for topic_type, topic_data
        in zip(log_topic_types, log_topics)
    ]
    normalized_topic_data = map_abi_data(
        BASE_RETURN_NORMALIZERS,
        log_topic_types,
        decoded_topic_data
    )

    # Merge indexed and non-indexed arguments into a single mapping.
    event_args = dict(itertools.chain(
        zip(log_topic_names, normalized_topic_data),
        zip(log_data_names, normalized_log_data),
    ))

    event_data = {
        'args': event_args,
        'event': event_abi['name'],
        'logIndex': log_entry['logIndex'],
        'transactionIndex': log_entry['transactionIndex'],
        'transactionHash': log_entry['transactionHash'],
        'address': log_entry['address'],
        'blockHash': log_entry['blockHash'],
        'blockNumber': log_entry['blockNumber'],
    }

    return cast(EventData, AttributeDict.recursive(event_data))
"type": "address" }, { "indexed": false, "name": "memo", "type": "string" } ], "name": "createEvent", "type": "event" }''' jsonobj = json.loads(eventABI) print(jsonobj) topicbytes = event_abi_to_log_topic(json.loads(eventABI)) print(encode_hex(topicbytes)) data = "0xffffffffffffffffffffffffffffffffffffffffff" \ "ffffffffffffffffffffff" \ "0000000000000000000000000" \ "0000000000000000000000000000000000000a0000000000000" \ "0000000000000000000000000000000000000000000" \ "000000000000000000000000000000000000000000" \ "0000000000000000000000000000000000000" \ "0000000000000000000" \ "0000000000000000000000000000000000000e00000000000000000000" \ "000000000000000000000000000000000000000" \ "0000036162630000000000000000000000000000000000000000000000000000" \ "00000000000000000000000000000000" \ "0000000000000000000000000000000000001f6e616d65206578697374" \ "73202c63616e27742063726561746520616761696e00"
def get_event_signatures(self, abi_list):
    """Map event name -> log topic for every event entry in *abi_list*."""
    return {
        entry['name']: event_abi_to_log_topic(entry)
        for entry in abi_list
        if entry['type'] == 'event'
    }
def get_event_processors(self, abi_list):
    """Map log topic -> a get_event_data processor for every event in *abi_list*."""
    processors = dict()
    for entry in abi_list:
        if entry['type'] != 'event':
            continue
        processors[event_abi_to_log_topic(entry)] = partial_fn(get_event_data, entry)
    return processors
def get_topics_of_events(abi: ABI) -> Dict[str, HexStr]:
    """Return a name -> 0x-prefixed topic mapping for all events in *abi*."""
    topics: Dict[str, HexStr] = {}
    for event_abi in filter_by_type("event", abi):
        topics[event_abi["name"]] = "0x" + event_abi_to_log_topic(event_abi).hex()  # type: ignore
    return topics
def get_event_signature_from_abi(event_name: str) -> bytes:
    """Look up *event_name* in the SMC ABI and return its log topic."""
    for entry in get_smc_json()['abi']:
        if entry['name'] == event_name and entry['type'] == 'event':
            return event_abi_to_log_topic(entry)
    raise ValueError("Event with name {} not found".format(event_name))
def calc_event_topic(abi_json: dict) -> bytes:
    ''' Calculate the event log topic (32 bytes) from the abi json'''
    event = EVENT(abi_json)
    return eth_utils.event_abi_to_log_topic(event)
def topic_from_event_name(self, name):
    """Return the hex-encoded log topic of the event called *name*."""
    event_abi = self.event_name_map[name]
    return encode_hex(event_abi_to_log_topic(event_abi))
def get_event_data(event_abi, log_entry):
    """
    Given an event ABI and a log entry for that event, return the decoded
    event data
    """
    if event_abi['anonymous']:
        # Anonymous events carry no signature topic.
        log_topics = log_entry['topics']
    elif not log_entry['topics']:
        raise MismatchedABI("Expected non-anonymous event to have 1 or more topics")
    elif event_abi_to_log_topic(event_abi) != log_entry['topics'][0]:
        raise MismatchedABI("The event signature did not match the provided ABI")
    else:
        # Drop the signature topic; the remainder are the indexed arguments.
        log_topics = log_entry['topics'][1:]

    # Indexed inputs are decoded from the topics ...
    log_topics_abi = get_indexed_event_inputs(event_abi)
    log_topic_normalized_inputs = normalize_event_input_types(log_topics_abi)
    log_topic_types = get_event_abi_types_for_decoding(log_topic_normalized_inputs)
    log_topic_names = get_abi_input_names({'inputs': log_topics_abi})

    if len(log_topics) != len(log_topic_types):
        raise ValueError("Expected {0} log topics. Got {1}".format(
            len(log_topic_types),
            len(log_topics),
        ))

    # ... while non-indexed inputs are ABI-decoded from the data blob.
    log_data = hexstr_if_str(to_bytes, log_entry['data'])
    log_data_abi = exclude_indexed_event_inputs(event_abi)
    log_data_normalized_inputs = normalize_event_input_types(log_data_abi)
    log_data_types = get_event_abi_types_for_decoding(log_data_normalized_inputs)
    log_data_names = get_abi_input_names({'inputs': log_data_abi})

    # sanity check that there are not name intersections between the topic
    # names and the data argument names.
    duplicate_names = set(log_topic_names).intersection(log_data_names)
    if duplicate_names:
        raise ValueError(
            "Invalid Event ABI: The following argument names are duplicated "
            "between event inputs: '{0}'".format(', '.join(duplicate_names))
        )

    decoded_log_data = decode_abi(log_data_types, log_data)
    normalized_log_data = map_abi_data(
        BASE_RETURN_NORMALIZERS,
        log_data_types,
        decoded_log_data
    )

    # Each indexed argument occupies exactly one topic word.
    decoded_topic_data = [
        decode_single(topic_type, topic_data)
        for topic_type, topic_data
        in zip(log_topic_types, log_topics)
    ]
    normalized_topic_data = map_abi_data(
        BASE_RETURN_NORMALIZERS,
        log_topic_types,
        decoded_topic_data
    )

    # Merge indexed and non-indexed arguments into a single mapping.
    event_args = dict(itertools.chain(
        zip(log_topic_names, normalized_topic_data),
        zip(log_data_names, normalized_log_data),
    ))

    event_data = {
        'args': event_args,
        'event': event_abi['name'],
        'logIndex': log_entry['logIndex'],
        'transactionIndex': log_entry['transactionIndex'],
        'transactionHash': log_entry['transactionHash'],
        'address': log_entry['address'],
        'blockHash': log_entry['blockHash'],
        'blockNumber': log_entry['blockNumber'],
    }

    return AttributeDict.recursive(event_data)