def test_parsing_invalid_type_str_causes_parse_error(type_str, error_col):
    """Malformed type strings must raise ParseError, with a column hint when known."""
    expected_msg = (
        r'Parse error at .*' if error_col is None
        else r'Parse error at .* \(column {}\)'.format(error_col)
    )
    with pytest.raises(ParseError, match=expected_msg):
        parse(type_str)
def test_decode_abi(type_str, expected, abi_encoding, _):
    """Decoding the reference ABI encoding of a tuple yields the expected value."""
    parsed = parse(type_str)
    if parsed.arrlist is not None:
        pytest.skip('ABI coding functions do not support array types')
    component_types = [component.to_type_str() for component in parsed.components]
    decoded = decode_abi(component_types, abi_encoding)
    assert decoded == expected
def test_encode_abi(type_str, python_value, _, packed_encoding):
    """Packed-encoding a tuple's components must match the reference bytes.

    NOTE(review): despite the name, this exercises ``encode_abi_packed``,
    not ``encode_abi`` — consider renaming for clarity.
    """
    parsed = parse(type_str)
    if parsed.arrlist is not None:
        pytest.skip('ABI coding functions do not support array types')
    component_types = [component.to_type_str() for component in parsed.components]
    assert encode_abi_packed(component_types, python_value) == packed_encoding
def test_valid_abi_types(type_str):
    """Every well-formed ABI type string must validate without raising."""
    abi_type = parse(type_str)
    abi_type.validate()
def test_invalid_abi_types(type_str, pattern):
    """Ill-formed ABI types parse but must fail validation with the expected message."""
    abi_type = parse(type_str)
    with pytest.raises(ABITypeError, match=pattern):
        abi_type.validate()
def test_parse_raises_type_error_for_wrong_data_type():
    """parse() accepts only text type strings; bytes input must be rejected."""
    non_str_input = b'uint256'
    with pytest.raises(TypeError):
        parse(non_str_input)
def is_array_type(type):
    """Return True when ``type`` (e.g. "person[]" or "person[2]") denotes an array."""
    # NOTE(review): the parameter shadows the builtin ``type``; kept as-is
    # because the name is part of the public signature for keyword callers.
    return parse(type).is_array
def test_decode_abi(type_str, expected, byte_str):
    """Decoding a tuple's serialized components reproduces the expected value."""
    component_types = [str(component) for component in parse(type_str).components]
    decoded = decode_abi(component_types, byte_str)
    assert decoded == expected
def test_parsing_with_parsimonious_grammar_and_node_visitor_works(type_str, expected_type):
    """Parsing a type string must build the expected ABI type tree."""
    parsed = parse(type_str)
    assert parsed == expected_type
def test_normalizing_and_parsing_works(type_str, expected_type):
    """Normalizing aliases first, then parsing, yields the canonical type."""
    normalized = normalize(type_str)
    assert parse(normalized) == expected_type
def test_abi_type_lacks_dynamic_arrlist(type_str):
    """None of these types carry a dynamically-sized array dimension."""
    parsed = parse(type_str)
    assert not parsed._has_dynamic_arrlist
def test_abi_type_is_not_array(type_str):
    """These type strings must not be classified as array types."""
    parsed = parse(type_str)
    assert not parsed.is_array
def test_abi_type_static_types(type_str):
    """These type strings must be classified as statically sized."""
    parsed = parse(type_str)
    assert not parsed.is_dynamic
def test_basic_type_item_type_throws_errors():
    """item_type is only meaningful for arrays; a basic type must raise ValueError."""
    basic_type = parse('int256')
    expected_msg = "Cannot determine item type for non-array type 'int256'"
    with pytest.raises(ValueError, match=expected_msg):
        basic_type.item_type
def test_parsing_with_parsimonious_grammar_and_node_visitor_works(
        type_str, expected_type):
    """The grammar + node-visitor pipeline must produce the expected type object."""
    result = parse(type_str)
    assert result == expected_type
def _encode_data(primary_type, types, data):
    """Yield (abi_type, value) pairs for EIP-712 ``encodeData`` of ``primary_type``.

    The first pair is the keccak type hash of the struct. Each declared field
    follows, with string/bytes/struct/array values pre-hashed to a ``bytes32``
    as the EIP-712 encoding requires; atomic ABI values pass through as-is.

    Raises:
        TypeError: when a value does not match its declared field type, an
            array value's dimensions disagree with the schema, or a field
            declares an invalid ABI type.
    """
    # Add typehash
    yield "bytes32", hash_struct_type(primary_type, types)

    # Add field contents
    for field in types[primary_type]:
        value = data[field["name"]]
        if field["type"] == "string":
            if not isinstance(value, str):
                raise TypeError(
                    "Value of `{0}` ({2}) in the struct `{1}` is of the type `{3}`, but expected "
                    "string value".format(
                        field["name"],
                        primary_type,
                        value,
                        type(value),
                    ))
            # Special case where the values need to be keccak hashed before they are encoded
            hashed_value = keccak(text=value)
            yield "bytes32", hashed_value
        elif field["type"] == "bytes":
            if not isinstance(value, bytes):
                raise TypeError(
                    "Value of `{0}` ({2}) in the struct `{1}` is of the type `{3}`, but expected "
                    "bytes value".format(
                        field["name"],
                        primary_type,
                        value,
                        type(value),
                    ))
            # Special case where the values need to be keccak hashed before they are encoded
            hashed_value = keccak(primitive=value)
            yield "bytes32", hashed_value
        elif field["type"] in types:
            # This means that this type is a user defined type
            hashed_value = keccak(
                primitive=encode_data(field["type"], types, value))
            yield "bytes32", hashed_value
        elif is_array_type(field["type"]):
            # Get the dimensions from the value
            array_dimensions = get_array_dimensions(value)
            # Get the dimensions from what was declared in the schema
            parsed_type = parse(field["type"])
            for i in range(len(array_dimensions)):
                if len(parsed_type.arrlist[i]) == 0:
                    # Skip empty or dynamically declared dimensions
                    continue
                if array_dimensions[i] != parsed_type.arrlist[i][0]:
                    # Dimensions should match with declared schema
                    raise TypeError(
                        "Array data `{0}` has dimensions `{1}` whereas the "
                        "schema has dimensions `{2}`".format(
                            value,
                            array_dimensions,
                            tuple(map(lambda x: x[0], parsed_type.arrlist)),
                        ))

            array_items = flatten_multidimensional_array(value)
            array_items_encoding = [
                encode_data(parsed_type.base, types, array_item)
                for array_item in array_items
            ]
            # BUG FIX: encode_data returns bytes (it is fed to
            # keccak(primitive=...) in the struct branch above), so the item
            # encodings must be joined with a bytes separator. The original
            # ''.join(...) raised TypeError for every array field.
            concatenated_array_encodings = b''.join(array_items_encoding)
            hashed_value = keccak(concatenated_array_encodings)
            yield "bytes32", hashed_value
        else:
            # First checking to see if type is valid as per abi
            if not is_valid_abi_type(field["type"]):
                raise TypeError(
                    "Received Invalid type `{0}` in the struct `{1}`".format(
                        field["type"],
                        primary_type,
                    ))

            # Next see if the data fits the specified encoding type
            if is_encodable(field["type"], value):
                # field["type"] is a valid type and this value corresponds to that type.
                yield field["type"], value
            else:
                raise TypeError(
                    "Value of `{0}` ({2}) in the struct `{1}` is of the type `{3}`, but expected "
                    "{4} value".format(
                        field["name"],
                        primary_type,
                        value,
                        type(value),
                        field["type"],
                    ))
def is_dynamic_sized_type(type_str: TypeStr) -> bool:
    """Return True when ``type_str`` names a dynamically-sized ABI type."""
    return grammar.parse(type_str).is_dynamic
def _get_abi_types(abi_params: List) -> Sequence[ABIType]:
    """Wrap ``abi_params`` in a single tuple type and return its parsed components."""
    joined = ','.join(get_type_strings(abi_params))
    tuple_type = parse(f"({joined})")
    return tuple_type.components
def test_end_to_end_parsing_and_collapsing(type_str):
    """parse() followed by to_type_str() must round-trip exactly."""
    collapsed = parse(type_str).to_type_str()
    assert collapsed == type_str
def test_end_to_end_parsing_and_collapsing(type_str):
    """str() of a parsed type must reproduce the original type string."""
    round_tripped = str(parse(type_str))
    assert round_tripped == type_str
def test_encode_abi(type_str, python_value, expected):
    """Encoding a tuple's components must reproduce the reference ABI bytes."""
    component_types = [str(component) for component in parse(type_str).components]
    encoded = encode_abi(component_types, python_value)
    assert encoded == expected