def solidityKeccak(abi_types, values, validity_check=False):
    """Compute keccak256 over tightly packed values, mirroring Solidity.

    ``abi_types`` is a list such as ``[uint24, int8[], bool]`` and
    ``values`` the matching Python values, e.g. ``[20, [-1, 5, 0], True]``.
    Adapted from web3.py.

    When ``validity_check`` is set, each value is pre-checked with
    ``is_encodable``; on the first failure a message is printed and
    ``False`` is returned instead of a hash.
    """
    if len(abi_types) != len(values):
        raise ValueError(
            "Length mismatch between provided abi types and values. Got "
            "{0} types and {1} values.".format(len(abi_types), len(values))
        )
    if validity_check:
        for t, v in zip(abi_types, values):
            if not is_encodable(t, v):
                print(f'Value {v} is not encodable for ABI type {t}')
                return False
    # Pack each (type, value) pair individually and concatenate the hex,
    # matching Solidity's abi.encodePacked semantics.
    packed_chunks = [
        encode_single_packed(abi_type, value).hex()
        for abi_type, value in zip(abi_types, values)
    ]
    hex_payload = eth_utils.add_0x_prefix(''.join(packed_chunks))
    return eth_utils.keccak(hexstr=hex_payload)
def matches_args(function, function_args) -> bool:
    """Return True iff eth_abi can encode each argument with its declared type.

    Args:
        function: ABI entry for the function; its ``inputs`` list declares
            the expected type of each positional argument.
        function_args: Positional argument values to validate.

    Returns:
        bool: True when every argument is encodable as its declared type.

    Raises:
        IndexError: if more arguments are supplied than the ABI declares
            inputs for (preserved from the original indexing behavior).
    """
    # Hoist the loop-invariant lookup: the original re-read
    # function['inputs'] on every iteration.
    inputs = function['inputs']
    # all() short-circuits on the first non-encodable argument, matching
    # the original early `return False`.
    return all(
        eth_abi.is_encodable(inputs[i]['type'], arg)
        for i, arg in enumerate(function_args)
    )
def check_if_arguments_can_be_encoded(function_abi, args, kwargs):
    """Report whether ``args``/``kwargs`` can be ABI-encoded for ``function_abi``.

    Returns False when the arguments cannot be merged into a positional
    list, when the argument count differs from the ABI's declared inputs,
    or when any value is not encodable as its declared type.
    """
    try:
        merged = merge_args_and_kwargs(function_abi, args, kwargs)
    except TypeError:
        # merge failure (e.g. unexpected keyword) means the call cannot
        # be encoded at all.
        return False

    declared_inputs = function_abi.get('inputs', [])
    if len(declared_inputs) != len(merged):
        return False

    input_types = get_abi_input_types(function_abi)
    for abi_type, value in zip(input_types, merged):
        if not is_encodable(abi_type, value):
            return False
    return True
def match_fn(match_values_and_abi, data):
    """Match function used for filtering non-indexed event arguments.

    Values provided through the match_values_and_abi parameter are compared
    to the abi decoded log data.

    ``match_values_and_abi`` is an iterable of (abi_type, match_values)
    pairs; ``data`` is the raw (hex-encodable) log data blob. Returns True
    only if every decoded argument either has ``match_values is None``
    (wildcard) or equals at least one of its candidate match values.

    Raises:
        ValueError: if a candidate match value is not encodable as the
            argument's ABI type.
    """
    abi_types, all_match_values = zip(*match_values_and_abi)
    decoded_values = decode_abi(abi_types, HexBytes(data))
    for data_value, match_values, abi_type in zip(decoded_values, all_match_values, abi_types):
        if match_values is None:
            # None acts as a wildcard: this argument matches anything.
            continue
        normalized_data = normalize_data_values(abi_type, data_value)
        for value in match_values:
            # Candidates are type-checked lazily, one at a time; candidates
            # after the first match are never validated.
            if not is_encodable(abi_type, value):
                raise ValueError(
                    "Value {0} is of the wrong abi type. "
                    "Expected {1} typed value.".format(value, abi_type))
            if value == normalized_data:
                # Found a candidate equal to the decoded value.
                break
        else:
            # Loop exhausted without a break: no candidate matched.
            return False
    return True
def _encode_data(primary_type, types, data):
    """Yield ``(abi_type, value)`` pairs for EIP-712 struct encoding.

    Walks the fields of ``types[primary_type]`` in declaration order,
    yielding the struct's typehash first, then one pair per field.
    ``string``/``bytes`` fields and nested struct/array fields are
    keccak-hashed before being yielded as ``bytes32``; atomic ABI types
    are yielded as-is.

    NOTE(review): nested/array fields recurse through ``encode_data``
    (no underscore) — presumably a module-level wrapper that joins this
    generator's output; confirm against the rest of the module.

    Raises:
        TypeError: on a value whose Python type doesn't match the declared
            field type, a fixed array dimension mismatch, or an ABI type
            that is not encodable.
    """
    # Add typehash
    yield "bytes32", hash_struct_type(primary_type, types)
    # Add field contents
    for field in types[primary_type]:
        value = data[field["name"]]
        if field["type"] == "string":
            if not isinstance(value, str):
                raise TypeError(
                    "Value of `{0}` ({2}) in the struct `{1}` is of the type `{3}`, but expected "
                    "string value".format(
                        field["name"],
                        primary_type,
                        value,
                        type(value),
                    ))
            # Special case where the values need to be keccak hashed before they are encoded
            hashed_value = keccak(text=value)
            yield "bytes32", hashed_value
        elif field["type"] == "bytes":
            if not isinstance(value, bytes):
                raise TypeError(
                    "Value of `{0}` ({2}) in the struct `{1}` is of the type `{3}`, but expected "
                    "bytes value".format(
                        field["name"],
                        primary_type,
                        value,
                        type(value),
                    ))
            # Special case where the values need to be keccak hashed before they are encoded
            hashed_value = keccak(primitive=value)
            yield "bytes32", hashed_value
        elif field["type"] in types:
            # This means that this type is a user defined type
            hashed_value = keccak(
                primitive=encode_data(field["type"], types, value))
            yield "bytes32", hashed_value
        elif is_array_type(field["type"]):
            # Get the dimensions from the value
            array_dimensions = get_array_dimensions(value)
            # Get the dimensions from what was declared in the schema
            parsed_type = parse(field["type"])
            for i in range(len(array_dimensions)):
                if len(parsed_type.arrlist[i]) == 0:
                    # Skip empty or dynamically declared dimensions
                    continue
                if array_dimensions[i] != parsed_type.arrlist[i][0]:
                    # Dimensions should match with declared schema
                    raise TypeError(
                        "Array data `{0}` has dimensions `{1}` whereas the "
                        "schema has dimensions `{2}`".format(
                            value,
                            array_dimensions,
                            tuple(map(lambda x: x[0], parsed_type.arrlist)),
                        ))
            # Arrays are encoded as the hash of the concatenated encodings
            # of their (flattened) items.
            array_items = flatten_multidimensional_array(value)
            array_items_encoding = [
                encode_data(parsed_type.base, types, array_item)
                for array_item in array_items
            ]
            concatenated_array_encodings = b''.join(array_items_encoding)
            hashed_value = keccak(concatenated_array_encodings)
            yield "bytes32", hashed_value
        else:
            # First checking to see if type is valid as per abi
            if not is_encodable_type(field["type"]):
                raise TypeError(
                    "Received Invalid type `{0}` in the struct `{1}`".format(
                        field["type"],
                        primary_type,
                    ))
            # Next see if the data fits the specified encoding type
            if is_encodable(field["type"], value):
                # field["type"] is a valid type and this value corresponds to that type.
                yield field["type"], value
            else:
                raise TypeError(
                    "Value of `{0}` ({2}) in the struct `{1}` is of the type `{3}`, but expected "
                    "{4} value".format(
                        field["name"],
                        primary_type,
                        value,
                        type(value),
                        field["type"],
                    ))
def first_pass_check_tuple_arr(cls, param_list, param_name, param_type, conversion_errors):
    """Validate a tuple-typed parameter via ``eth_abi``'s encodability check.

    The full type string for the tuple is expected in ``param_type`` so the
    entire structure can be checked at once with ``is_encodable``.

    Returns the (unmodified) ``param_list`` together with an error flag that
    is True when the value is NOT encodable as ``param_type``.
    """
    encodable = is_encodable(param_type, param_list)
    return param_list, not encodable
def test_is_encodable_returns_true_for_random_valid_tuple_values(type_and_value):
    """A randomly generated valid (type, value) pair must be encodable."""
    abi_type, sample = type_and_value
    assert is_encodable(abi_type, sample)
def test_is_encodable_returns_false(type_str, python_value):
    """Values that do not fit the ABI type must be rejected."""
    result = is_encodable(type_str, python_value)
    assert not result
def test_is_encodable_returns_true(type_str, python_value, _1, _2):
    """Values that fit the ABI type must be accepted."""
    result = is_encodable(type_str, python_value)
    assert result
def test_is_encodable_returns_true_for_random_valid_tuple_values(type_and_value):
    """A randomly generated valid (type, value) pair must be encodable."""
    # NOTE(review): this test shares its name with another test earlier in
    # this source; if both live in the same module the later definition
    # shadows the earlier one — confirm whether they belong to separate files.
    abi_type, sample = type_and_value
    assert is_encodable(abi_type, sample)