def field_type(self, field: str) -> str:
    """
    Looks up ``field`` via type annotations, returning the underlying ABI
    type (e.g. ``"uint256"``) or, for struct members, the registered type
    name of the :class:`EIP712Type` subclass.

    Raises ``KeyError`` if the field doesn't exist, and ``ValidationError``
    if the annotation is neither a valid ABI type string nor an
    ``EIP712Type`` subclass.
    """
    # KeyError propagates for unknown fields (documented behavior).
    typ = self.__annotations__[field]

    if isinstance(typ, str):
        # Plain string annotation: must name an encodable ABI type.
        if not is_encodable_type(typ):
            raise ValidationError(
                f"'{field}: {typ}' is not a valid ABI type")
        return typ
    elif isinstance(typ, type) and issubclass(typ, EIP712Type):
        # Nested struct: report the struct's registered type name.
        return typ.type
    else:
        # BUGFIX: the `isinstance(typ, type)` guard above routes non-class
        # annotations (e.g. typing generics) here instead of letting
        # `issubclass` raise an opaque TypeError; `getattr` keeps the message
        # intact for classes while not crashing on objects without __name__.
        raise ValidationError(
            f"'{field}' type annotation must either be a subclass of "
            f"`EIP712Type` or valid ABI Type string, not "
            f"{getattr(typ, '__name__', repr(typ))}")
def _encode_data(primary_type, types, data):
    """
    Yield ``(abi_type, value)`` pairs making up the EIP-712 ``encodeData``
    of the struct ``primary_type``, starting with its type hash.

    :param primary_type: name of the struct being encoded (a key in ``types``)
    :param types: mapping of struct name -> list of ``{"name", "type"}`` dicts
    :param data: mapping of field name -> value for this struct instance
    :raises TypeError: if a value's Python type or array shape does not match
        the declared schema, or a declared type is not a valid ABI type
    """
    # Add typehash
    yield "bytes32", hash_struct_type(primary_type, types)

    # Add field contents, in schema declaration order
    for field in types[primary_type]:
        value = data[field["name"]]
        if field["type"] == "string":
            if not isinstance(value, str):
                raise TypeError(
                    "Value of `{0}` ({2}) in the struct `{1}` is of the type `{3}`, but expected "
                    "string value".format(
                        field["name"],
                        primary_type,
                        value,
                        type(value),
                    ))
            # Special case where the values need to be keccak hashed before they are encoded
            hashed_value = keccak(text=value)
            yield "bytes32", hashed_value
        elif field["type"] == "bytes":
            if not isinstance(value, bytes):
                raise TypeError(
                    "Value of `{0}` ({2}) in the struct `{1}` is of the type `{3}`, but expected "
                    "bytes value".format(
                        field["name"],
                        primary_type,
                        value,
                        type(value),
                    ))
            # Special case where the values need to be keccak hashed before they are encoded
            hashed_value = keccak(primitive=value)
            yield "bytes32", hashed_value
        elif field["type"] in types:
            # This means that this type is a user defined type: encode the
            # nested struct recursively, then hash the encoding.
            hashed_value = keccak(
                primitive=encode_data(field["type"], types, value))
            yield "bytes32", hashed_value
        elif is_array_type(field["type"]):
            # Get the dimensions from the value
            array_dimensions = get_array_dimensions(value)
            # Get the dimensions from what was declared in the schema
            parsed_type = parse(field["type"])
            for i in range(len(array_dimensions)):
                if len(parsed_type.arrlist[i]) == 0:
                    # Skip empty or dynamically declared dimensions
                    continue
                if array_dimensions[i] != parsed_type.arrlist[i][0]:
                    # Dimensions should match with declared schema
                    # NOTE(review): formatting this message takes x[0] of every
                    # declared dimension; a dynamic (empty) dimension elsewhere
                    # in the type would raise IndexError here — confirm.
                    raise TypeError(
                        "Array data `{0}` has dimensions `{1}` whereas the "
                        "schema has dimensions `{2}`".format(
                            value,
                            array_dimensions,
                            tuple(map(lambda x: x[0], parsed_type.arrlist)),
                        ))
            # Flatten the (possibly nested) array, encode each base element,
            # concatenate the encodings, and hash the concatenation.
            array_items = flatten_multidimensional_array(value)
            array_items_encoding = [
                encode_data(parsed_type.base, types, array_item)
                for array_item in array_items
            ]
            concatenated_array_encodings = b''.join(array_items_encoding)
            hashed_value = keccak(concatenated_array_encodings)
            yield "bytes32", hashed_value
        else:
            # First checking to see if type is valid as per abi
            if not is_encodable_type(field["type"]):
                raise TypeError(
                    "Received Invalid type `{0}` in the struct `{1}`".format(
                        field["type"],
                        primary_type,
                    ))
            # Next see if the data fits the specified encoding type
            if is_encodable(field["type"], value):
                # field["type"] is a valid type and this value corresponds to that type.
                yield field["type"], value
            else:
                raise TypeError(
                    "Value of `{0}` ({2}) in the struct `{1}` is of the type `{3}`, but expected "
                    "{4} value".format(
                        field["name"],
                        primary_type,
                        value,
                        type(value),
                        field["type"],
                    ))
def test_is_encodable_type_returns_false():
    """A bogus type name must not be reported as encodable."""
    result = is_encodable_type('foo')
    assert not result
def test_is_encodable_type_returns_true(type_str, _python_value, _1, _2):
    """Each parametrized ABI type string should be recognized as encodable."""
    # Only ``type_str`` matters here; the remaining parametrize slots are unused.
    encodable = is_encodable_type(type_str)
    assert encodable