Example 1
def test_hexstr_if_str_on_invalid_hex(val):
    try:
        is_hexstr = (is_hex(val) or val == '')
    except ValueError:
        is_hexstr = False

    if not is_hexstr:
        with pytest.raises(ValueError):
            hexstr_if_str(Mock(), val)
Example 2
def abi_bytes_to_hex(abi_type, data):
    base, sub, arrlist = process_type(abi_type)
    if base == 'bytes' and not arrlist:
        bytes_data = hexstr_if_str(to_bytes, data)
        if not sub:
            return abi_type, to_hex(bytes_data)
        else:
            num_bytes = int(sub)
            if len(bytes_data) <= num_bytes:
                padded = bytes_data.ljust(num_bytes, b'\0')
                return abi_type, to_hex(padded)
            else:
                raise ValueError(
                    "This value was expected to be at most %d bytes, but instead was %d: %r"
                    % (num_bytes, len(bytes_data), data)
                )
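A brief usage sketch of the padding and overflow behaviour above. The calls and results are illustrative and assume eth_utils-style to_bytes / to_hex converters; they are not taken from the library's test suite.

# Hypothetical calls against abi_bytes_to_hex as defined above.
assert abi_bytes_to_hex('bytes', b'\x01\x02') == ('bytes', '0x0102')             # unsized: hex-encode as-is
assert abi_bytes_to_hex('bytes8', '0x0102') == ('bytes8', '0x0102000000000000')  # right-padded to 8 bytes
try:
    abi_bytes_to_hex('bytes2', b'\x01\x02\x03')   # 3 bytes do not fit into bytes2
except ValueError:
    pass                                          # oversized input is rejected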
Example 3
def test_hexstr_if_str_curried():
    converter = hexstr_if_str(to_hex)
    assert converter(255) == '0xff'
Example 4
def test_hexstr_if_str_passthrough(val):
    to_type = Mock(return_value='zoot')
    assert hexstr_if_str(to_type, val) == 'zoot'
    assert to_type.call_args == ((val, ), {'hexstr': None})
Example 5
def test_hexstr_if_str_on_valid_hex(val):
    to_type = Mock(return_value='zoot')
    assert hexstr_if_str(to_type, val) == 'zoot'
    assert to_type.call_args == ((None, ), {'hexstr': val})
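Taken together, Examples 3 to 5 pin down the dispatch rule: a hex string is forwarded through the hexstr keyword, while any other value is passed positionally as a primitive. A minimal sketch of that behaviour, assuming eth_utils-style to_int / to_hex converters that accept either form:

# Illustrative only; assumes to_int and to_hex accept both a positional
# primitive and a hexstr keyword argument.
assert hexstr_if_str(to_int, '0xff') == 255    # str -> to_int(None, hexstr='0xff')
assert hexstr_if_str(to_int, 255) == 255       # int -> to_int(255, hexstr=None)
assert hexstr_if_str(to_hex)(255) == '0xff'    # curried form, as in Example 3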
Example 6
    'platon_getUncleByBlockNumberAndIndex': compose(
        apply_formatter_at_index(block_number_formatter, 0),
        apply_formatter_at_index(integer_to_hex, 1),
    ),
    'platon_getUncleByBlockHashAndIndex': apply_formatter_at_index(integer_to_hex, 1),
    'platon_newFilter': apply_formatter_at_index(filter_params_formatter, 0),
    'platon_getLogs': apply_formatter_at_index(filter_params_formatter, 0),
    'platon_call': combine_argument_formatters(
        transaction_param_formatter,
        block_number_formatter,
    ),
    'platon_estimateGas': apply_formatter_at_index(transaction_param_formatter, 0),
    'platon_sendTransaction': apply_formatter_at_index(transaction_param_formatter, 0),
    # personal
    'personal_importRawKey': apply_formatter_at_index(
        compose(remove_0x_prefix, hexstr_if_str(to_hex)),
        0,
    ),
    'personal_sign': apply_formatter_at_index(text_if_str(to_hex), 0),
    'personal_ecRecover': apply_formatter_at_index(text_if_str(to_hex), 0),
    'personal_sendTransaction': apply_formatter_at_index(transaction_param_formatter, 0),
    # Snapshot and Revert
    'evm_revert': apply_formatter_at_index(integer_to_hex, 0),
    'trace_replayBlockTransactions': apply_formatter_at_index(block_number_formatter, 0),
    'trace_block': apply_formatter_at_index(block_number_formatter, 0),
    'trace_call': compose(
        apply_formatter_at_index(transaction_param_formatter, 0),
        apply_formatter_at_index(block_number_formatter, 2)
    ),
},
result_formatters={
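The request-formatter table above is built almost entirely from apply_formatter_at_index calls. A minimal sketch (not the library's implementation) of what such a helper does: apply one formatter to a single positional RPC parameter and leave the rest untouched.

def apply_formatter_at_index_sketch(formatter, at_index):
    # Hypothetical stand-in for the helper used above, for illustration only.
    def apply(params):
        return tuple(
            formatter(item) if index == at_index else item
            for index, item in enumerate(params)
        )
    return apply

# e.g. formatting the uncle index (argument 1) as hex, leaving the block hash alone:
format_params = apply_formatter_at_index_sketch(hex, 1)
assert format_params(('0xblockhash', 2)) == ('0xblockhash', '0x2')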
Example 7
def abi_int_to_hex(abi_type, data):
    base, _sub, arrlist = process_type(abi_type)
    if base == 'uint' and not arrlist:
        return abi_type, hexstr_if_str(to_hex, data)
Example 8
def abi_bytes_to_bytes(abi_type, data):
    base, sub, arrlist = process_type(abi_type)
    if base == 'bytes' and not arrlist:
        return abi_type, hexstr_if_str(to_bytes, data)
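Examples 7 and 8 follow the same pattern as Example 2: normalise a single scalar value when the ABI type matches, and fall through (returning None) otherwise. Illustrative calls, again assuming eth_utils-style converters:

assert abi_int_to_hex('uint256', 255) == ('uint256', '0xff')
assert abi_bytes_to_bytes('bytes', '0x00ff') == ('bytes', b'\x00\xff')
assert abi_int_to_hex('address', '0x0') is None   # non-matching type falls through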
Example 9
def get_event_data(event_abi, log_entry):
    """
    Given an event ABI and a log entry for that event, return the decoded
    event data
    """
    if event_abi['anonymous']:
        log_topics = log_entry['topics']
    elif not log_entry['topics']:
        raise MismatchedABI("Expected non-anonymous event to have 1 or more topics")
    elif event_abi_to_log_topic(event_abi) != log_entry['topics'][0]:
        raise MismatchedABI("The event signature did not match the provided ABI")
    else:
        log_topics = log_entry['topics'][1:]

    log_topics_abi = get_indexed_event_inputs(event_abi)
    log_topic_normalized_inputs = normalize_event_input_types(log_topics_abi)
    log_topic_types = get_event_abi_types_for_decoding(log_topic_normalized_inputs)
    log_topic_names = get_abi_input_names({'inputs': log_topics_abi})

    if len(log_topics) != len(log_topic_types):
        raise ValueError("Expected {0} log topics.  Got {1}".format(
            len(log_topic_types),
            len(log_topics),
        ))

    log_data = hexstr_if_str(to_bytes, log_entry['data'])
    log_data_abi = exclude_indexed_event_inputs(event_abi)
    log_data_normalized_inputs = normalize_event_input_types(log_data_abi)
    log_data_types = get_event_abi_types_for_decoding(log_data_normalized_inputs)
    log_data_names = get_abi_input_names({'inputs': log_data_abi})

    # sanity check that there are not name intersections between the topic
    # names and the data argument names.
    duplicate_names = set(log_topic_names).intersection(log_data_names)
    if duplicate_names:
        raise ValueError(
            "Invalid Event ABI:  The following argument names are duplicated "
            "between event inputs: '{0}'".format(', '.join(duplicate_names))
        )

    decoded_log_data = decode_abi(log_data_types, log_data)
    normalized_log_data = map_abi_data(
        BASE_RETURN_NORMALIZERS,
        log_data_types,
        decoded_log_data
    )

    decoded_topic_data = [
        decode_single(topic_type, topic_data)
        for topic_type, topic_data
        in zip(log_topic_types, log_topics)
    ]
    normalized_topic_data = map_abi_data(
        BASE_RETURN_NORMALIZERS,
        log_topic_types,
        decoded_topic_data
    )

    event_args = dict(itertools.chain(
        zip(log_topic_names, normalized_topic_data),
        zip(log_data_names, normalized_log_data),
    ))

    event_data = {
        'args': event_args,
        'event': event_abi['name'],
        'logIndex': log_entry['logIndex'],
        'transactionIndex': log_entry['transactionIndex'],
        'transactionHash': log_entry['transactionHash'],
        'address': log_entry['address'],
        'blockHash': log_entry['blockHash'],
        'blockNumber': log_entry['blockNumber'],
    }

    return AttributeDict.recursive(event_data)
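Finally, a hedged usage sketch for get_event_data. The ABI and log entry below are fabricated for illustration: an anonymous event with a single non-indexed uint256 avoids the topic/signature checks, and the decoded value should surface under args (assuming an eth_abi-backed decode_abi and eth_utils-style converters).

event_abi = {
    'anonymous': True,
    'name': 'ValueLogged',   # illustrative event name
    'inputs': [
        {'name': 'amount', 'type': 'uint256', 'indexed': False},
    ],
}
log_entry = {
    'topics': [],                      # anonymous event, nothing indexed
    'data': '0x' + '00' * 31 + 'ff',   # ABI-encoded uint256 value 255
    'logIndex': 0,
    'transactionIndex': 0,
    'transactionHash': '0x' + '00' * 32,
    'address': '0x' + '00' * 20,
    'blockHash': '0x' + '00' * 32,
    'blockNumber': 1,
}

event_data = get_event_data(event_abi, log_entry)
assert event_data['event'] == 'ValueLogged'
assert event_data['args']['amount'] == 255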