def decode_event_data(data: str, event_abi: Dict[str, Any]) -> Tuple:
    """Decode a log's ``data`` field into a tuple of values per the event ABI.

    Only the event's non-indexed inputs are decoded here; indexed inputs
    live in the log topics and are handled separately.
    """
    # Normalize the hex string into raw bytes for the codec.
    raw_bytes = hexstr_if_str(to_bytes, data)
    # Build the ABI type list from the event's non-indexed inputs.
    non_indexed_inputs = exclude_indexed_event_inputs(event_abi)  # type: ignore
    decoding_types = get_event_abi_types_for_decoding(
        normalize_event_input_types(non_indexed_inputs)
    )
    return ABI_CODEC.decode_abi(decoding_types, raw_bytes)
def decode_log(log_data, events):
    """Decode a raw log entry into a mapping of input name -> decoded value.

    Indexed inputs are recovered one-per-topic (topic 0, the event signature
    hash, is skipped); non-indexed inputs are ABI-decoded together from the
    ``data`` field. Both result sets are merged into a single dict.
    """
    abi = log_data['abi']

    # Indexed inputs: one topic per input, starting after the signature topic.
    indexed_inputs = get_indexed_event_inputs(abi)
    indexed_types = get_abi_input_types({'inputs': indexed_inputs})
    indexed_names = get_abi_input_names({'inputs': indexed_inputs})
    raw_topics = [HexBytes(topic) for topic in log_data['topics'][1:]]
    decoded_topics = [
        decode_single(abi_type, raw_topic)
        for abi_type, raw_topic in zip(indexed_types, raw_topics)
    ]

    # Non-indexed inputs: packed together in the data field.
    unindexed_inputs = exclude_indexed_event_inputs(abi)
    unindexed_types = get_abi_input_types({'inputs': unindexed_inputs})
    unindexed_names = get_abi_input_names({'inputs': unindexed_inputs})
    decoded_data = decode_abi(unindexed_types, HexBytes(log_data['data']))

    # Merge: topic-derived args first, then data-derived args.
    merged = dict(zip(indexed_names, decoded_topics))
    merged.update(zip(unindexed_names, decoded_data))
    return merged
def construct_event_data_set(
    event_abi: ABIEvent,
    abi_codec: ABICodec,
    arguments: Optional[Union[Sequence[Any], Dict[str, Any]]] = None
) -> List[List[Optional[HexStr]]]:
    """Build candidate ``data`` permutations for filtering on an event.

    ``arguments`` may be positional (one value per ABI input) or a mapping
    of input name to a value or list of values. Each option is hex-encoded
    with the codec; ``None`` acts as a wildcard. The cartesian product of
    every input's encoded options is returned, with all-wildcard rows
    collapsed to ``[]``.
    """
    if arguments is None:
        arguments = {}
    if isinstance(arguments, (list, tuple)):
        # Positional arguments must line up one-to-one with the ABI inputs.
        if len(arguments) != len(event_abi['inputs']):
            raise ValueError(
                "When passing an argument list, the number of arguments must "
                "match the event constructor."
            )
        arguments = {
            inp['name']: [value]
            for inp, value in zip(event_abi['inputs'], arguments)
        }

    # Wrap scalar values so every argument maps to a list of options.
    normalized_args = {}
    # type ignored b/c at this point arguments is always a dict
    for key, value in arguments.items():  # type: ignore
        normalized_args[key] = value if is_list_like(value) else [value]

    # Hex-encode each option of every non-indexed input; None is a wildcard.
    encoded_options = []
    for inp in exclude_indexed_event_inputs(event_abi):
        options = normalized_args.get(inp['name'], [None])
        encoded_options.append([
            None if option is None
            else encode_hex(abi_codec.encode_single(inp['type'], option))
            for option in options
        ])

    # Rows consisting only of wildcards are collapsed to the empty list.
    return [
        [] if all(value is None for value in permutation) else list(permutation)
        for permutation in itertools.product(*encoded_options)
    ]
def get_event_data(abi_codec: ABICodec, event_abi: ABIEvent, log_entry: LogReceipt) -> EventData:
    """
    Given an event ABI and a log entry for that event, return the decoded
    event data
    """
    # Anonymous events carry no signature topic; otherwise topic 0 must be
    # the event's signature hash and the remaining topics hold indexed args.
    if event_abi['anonymous']:
        log_topics = log_entry['topics']
    elif not log_entry['topics']:
        raise MismatchedABI("Expected non-anonymous event to have 1 or more topics")
    # type ignored b/c event_abi_to_log_topic(event_abi: Dict[str, Any])
    elif event_abi_to_log_topic(event_abi) != log_entry['topics'][0]:  # type: ignore
        raise MismatchedABI("The event signature did not match the provided ABI")
    else:
        log_topics = log_entry['topics'][1:]

    # Indexed inputs are decoded one-per-topic.
    indexed_inputs = get_indexed_event_inputs(event_abi)
    topic_types = get_event_abi_types_for_decoding(
        normalize_event_input_types(indexed_inputs)
    )
    topic_names = get_abi_input_names(ABIEvent({'inputs': indexed_inputs}))

    if len(log_topics) != len(topic_types):
        raise LogTopicError("Expected {0} log topics. Got {1}".format(
            len(topic_types),
            len(log_topics),
        ))

    # Non-indexed inputs are ABI-decoded together from the data field.
    data_bytes = hexstr_if_str(to_bytes, log_entry['data'])
    non_indexed_inputs = exclude_indexed_event_inputs(event_abi)
    data_types = get_event_abi_types_for_decoding(
        normalize_event_input_types(non_indexed_inputs)
    )
    data_names = get_abi_input_names(ABIEvent({'inputs': non_indexed_inputs}))

    # sanity check that there are not name intersections between the topic
    # names and the data argument names.
    duplicate_names = set(topic_names).intersection(data_names)
    if duplicate_names:
        raise InvalidEventABI(
            "The following argument names are duplicated "
            f"between event inputs: '{', '.join(duplicate_names)}'"
        )

    decoded_data = abi_codec.decode_abi(data_types, data_bytes)
    normalized_data = map_abi_data(
        BASE_RETURN_NORMALIZERS,
        data_types,
        decoded_data
    )

    decoded_topics = [
        abi_codec.decode_single(topic_type, topic_value)
        for topic_type, topic_value in zip(topic_types, log_topics)
    ]
    normalized_topics = map_abi_data(
        BASE_RETURN_NORMALIZERS,
        topic_types,
        decoded_topics
    )

    # Topic-derived args first, then data-derived args, merged by name.
    event_args = dict(itertools.chain(
        zip(topic_names, normalized_topics),
        zip(data_names, normalized_data),
    ))

    event_data = {
        'args': event_args,
        'event': event_abi['name'],
        'logIndex': log_entry['logIndex'],
        'transactionIndex': log_entry['transactionIndex'],
        'transactionHash': log_entry['transactionHash'],
        'address': log_entry['address'],
        'blockHash': log_entry['blockHash'],
        'blockNumber': log_entry['blockNumber'],
    }

    return cast(EventData, AttributeDict.recursive(event_data))