def make_arg_clamper(datapos, mempos, typ, is_init=False):
    """
    Build an LLL node that validates ("clamps") one argument of type ``typ``.

    :param datapos: offset of the value within calldata (or code, for init)
    :param mempos: memory offset used when copying byte arrays
    :param typ: Vyper type of the argument being checked
    :param is_init: True when generating constructor (init) bytecode, in which
        case the value is read from code rather than calldata
    :return: LLL performing the check, or a 'pass' node if no check is needed
    """
    # Constructor arguments live at the end of the code section; runtime
    # arguments live in calldata after the 4-byte method id.
    if is_init:
        value_expr = ['codeload', ['add', '~codelen', datapos]]
        make_copier = functools.partial(_mk_codecopy_copier, mempos=mempos)
    else:
        value_expr = ['calldataload', ['add', 4, datapos]]
        make_copier = functools.partial(_mk_calldatacopy_copier, mempos=mempos)

    if is_base_type(typ, 'int128'):
        # Numbers: make sure they're in range
        clamp_lll = [
            'clamp',
            ['mload', MemoryPositions.MINNUM],
            value_expr,
            ['mload', MemoryPositions.MAXNUM],
        ]
        return LLLnode.from_list(clamp_lll, typ=typ, annotation='checking int128 input')

    if is_base_type(typ, 'bool'):
        # Booleans: make sure they're zero or one
        return LLLnode.from_list(
            ['uclamplt', value_expr, 2],
            typ=typ,
            annotation='checking bool input',
        )

    if is_base_type(typ, 'address'):
        # Addresses: make sure they're in range
        return LLLnode.from_list(
            ['uclamplt', value_expr, ['mload', MemoryPositions.ADDRSIZE]],
            typ=typ,
            annotation='checking address input',
        )

    if isinstance(typ, ByteArrayLike):
        # Bytes: copy to memory, then make sure the length fits the declared max
        length_check = [
            'assert',
            ['le', ['calldataload', ['add', 4, value_expr]], typ.maxlen],
        ]
        return LLLnode.from_list(
            ['seq', make_copier(value_expr, 32 + typ.maxlen), length_check],
            typ=None,
            annotation='checking bytearray input',
        )

    if isinstance(typ, ListType):
        # Lists: recurse on each element, shifting both offsets per element
        elem_size = get_size_of_type(typ.subtype) * 32
        element_clampers = [
            make_arg_clamper(datapos + n * elem_size, mempos + n * elem_size,
                             typ.subtype, is_init)
            for n in range(typ.count)
        ]
        return LLLnode.from_list(['seq'] + element_clampers,
                                 typ=None,
                                 annotation='checking list input')

    # Otherwise don't make any checks
    return LLLnode.from_list('pass')
def pack_args_by_32(holder,
                    maxlen,
                    arg,
                    typ,
                    context,
                    placeholder,
                    dynamic_offset_counter=None,
                    datamem_start=None,
                    zero_pad_i=None,
                    pos=None):
    """
    Copy necessary variables to pre-allocated memory section.

    :param holder: Complete holder for all args
    :param maxlen: Total length in bytes of the full arg section (static + dynamic).
    :param arg: Current arg to pack
    :param context: Context of arg
    :param placeholder: Static placeholder for static argument part.
    :param dynamic_offset_counter: position counter stored in static args.
    :param datamem_start: position where the whole datamem section starts.
    :return: tuple of (holder, maxlen) after appending the LLL for this arg.

    NOTE(review): a ``dynamic_placeholder`` param ("pointer to current position
    in memory to write dynamic values to") was documented here but is not in
    the signature — presumably stale; verify against callers.
    """
    if isinstance(typ, BaseType):
        # Static 32-byte value: evaluate it and store it at the placeholder slot.
        if isinstance(arg, LLLnode):
            value = unwrap_location(arg)
        else:
            value = Expr(arg, context).lll_node
        value = base_type_conversion(value, value.typ, typ, pos)
        holder.append(
            LLLnode.from_list(['mstore', placeholder, value], typ=typ, location='memory'))
    elif isinstance(typ, ByteArrayLike):
        if isinstance(arg, LLLnode):  # Is preallocated variable.
            source_lll = arg
        else:
            source_lll = Expr(arg, context).lll_node

        # Set static offset, in arg slot.
        holder.append(
            LLLnode.from_list(
                ['mstore', placeholder, ['mload', dynamic_offset_counter]]))
        # Get the beginning to write the ByteArray to.
        dest_placeholder = LLLnode.from_list(
            ['add', datamem_start, ['mload', dynamic_offset_counter]],
            typ=typ,
            location='memory',
            annotation="pack_args_by_32:dest_placeholder")
        copier = make_byte_array_copier(dest_placeholder, source_lll, pos=pos)
        holder.append(copier)
        # Add zero padding.
        holder.append(zero_pad(dest_placeholder, maxlen, zero_pad_i=zero_pad_i))

        # Increment offset counter: advance past the 32-byte length word plus
        # the data, rounded up to a multiple of 32.
        increment_counter = LLLnode.from_list(
            [
                'mstore',
                dynamic_offset_counter,
                [
                    'add',
                    [
                        'add',
                        ['mload', dynamic_offset_counter],
                        ['ceil32', ['mload', dest_placeholder]]
                    ],
                    32,
                ],
            ],
            annotation='Increment dynamic offset counter')
        holder.append(increment_counter)
    elif isinstance(typ, ListType):
        # Each additional element beyond the first adds 32 bytes to the
        # static section; recurse per-element with typ rebound to the subtype.
        maxlen += (typ.count - 1) * 32
        typ = typ.subtype

        def check_list_type_match(provided):  # Check list types match.
            if provided != typ:
                raise TypeMismatchException(
                    "Log list type '%s' does not match provided, expected '%s'" %
                    (provided, typ))

        # NOTE: Below code could be refactored into iterators/getter functions for each type of
        #       repetitive loop. But seeing how each one is a unique for loop, and in which way
        #       the sub value makes the difference in each type of list clearer.

        # List from storage
        if isinstance(arg, ast.Attribute) and arg.value.id == 'self':
            stor_list = context.globals[arg.attr]
            check_list_type_match(stor_list.typ.subtype)
            size = stor_list.typ.count
            mem_offset = 0
            for i in range(0, size):
                storage_offset = i
                # Load element i from the hashed storage slot of the list.
                arg2 = LLLnode.from_list(
                    [
                        'sload',
                        [
                            'add', ['sha3_32', Expr(arg, context).lll_node],
                            storage_offset
                        ]
                    ],
                    typ=typ,
                )
                holder, maxlen = pack_args_by_32(
                    holder,
                    maxlen,
                    arg2,
                    typ,
                    context,
                    placeholder + mem_offset,
                    pos=pos,
                )
                mem_offset += get_size_of_type(typ) * 32
        # List from variable.
        elif isinstance(arg, ast.Name):
            size = context.vars[arg.id].size
            pos = context.vars[arg.id].pos
            check_list_type_match(context.vars[arg.id].typ.subtype)
            mem_offset = 0
            for _ in range(0, size):
                arg2 = LLLnode.from_list(
                    pos + mem_offset,
                    typ=typ,
                    location=context.vars[arg.id].location)
                holder, maxlen = pack_args_by_32(
                    holder,
                    maxlen,
                    arg2,
                    typ,
                    context,
                    placeholder + mem_offset,
                    pos=pos,
                )
                mem_offset += get_size_of_type(typ) * 32
        # List from list literal.
        else:
            mem_offset = 0
            for arg2 in arg.elts:
                holder, maxlen = pack_args_by_32(
                    holder,
                    maxlen,
                    arg2,
                    typ,
                    context,
                    placeholder + mem_offset,
                    pos=pos,
                )
                mem_offset += get_size_of_type(typ) * 32
    return holder, maxlen
def make_arg_clamper(datapos, mempos, typ, is_init=False):
    """
    Clamps argument to type limits.

    Arguments
    ---------
    datapos : int | LLLnode
        Calldata offset of the value being clamped
    mempos : int | LLLnode
        Memory offset that the value is stored at during clamping
    typ : vyper.types.types.BaseType
        Type of the value
    is_init : bool, optional
        Boolean indicating if we are generating init bytecode

    Returns
    -------
    LLLnode
        Arg clamper LLL
    """
    # Init bytecode reads args from the end of the code section; runtime
    # bytecode reads them from calldata (after the 4-byte method id).
    if not is_init:
        data_decl = ["calldataload", ["add", 4, datapos]]
        copier = functools.partial(_mk_calldatacopy_copier, mempos=mempos)
    else:
        data_decl = ["codeload", ["add", "~codelen", datapos]]
        copier = functools.partial(_mk_codecopy_copier, mempos=mempos)
    # Numbers: make sure they're in range
    if is_base_type(typ, "int128"):
        return LLLnode.from_list(
            [
                "clamp",
                ["mload", MemoryPositions.MINNUM],
                data_decl,
                ["mload", MemoryPositions.MAXNUM],
            ],
            typ=typ,
            annotation="checking int128 input",
        )
    # Booleans: make sure they're zero or one
    elif is_base_type(typ, "bool"):
        return LLLnode.from_list(
            ["uclamplt", data_decl, 2],
            typ=typ,
            annotation="checking bool input",
        )
    # Addresses: make sure they're in range
    elif is_base_type(typ, "address"):
        return LLLnode.from_list(
            ["uclamplt", data_decl, ["mload", MemoryPositions.ADDRSIZE]],
            typ=typ,
            annotation="checking address input",
        )
    # Bytes: make sure they have the right size
    elif isinstance(typ, ByteArrayLike):
        return LLLnode.from_list(
            [
                "seq",
                copier(data_decl, 32 + typ.maxlen),
                [
                    "assert",
                    [
                        "le",
                        ["calldataload", ["add", 4, data_decl]],
                        typ.maxlen
                    ]
                ],
            ],
            typ=None,
            annotation="checking bytearray input",
        )
    # Lists: recurse
    elif isinstance(typ, ListType):
        # Emit a runtime loop instead of unrolling when the list is large
        # (count > 5) or when the offsets are runtime LLL expressions (lists),
        # which happens when this call is itself inside a generated loop.
        if typ.count > 5 or (type(datapos) is list and type(mempos) is list):
            # find ultimate base type
            subtype = typ.subtype
            while hasattr(subtype, "subtype"):
                subtype = subtype.subtype

            # make arg clamper for the base type
            offset = MemoryPositions.FREE_LOOP_INDEX
            clamper = make_arg_clamper(
                ["add", datapos, ["mload", offset]],
                ["add", mempos, ["mload", offset]],
                subtype,
                is_init,
            )
            if clamper.value == "pass":
                # no point looping if the base type doesn't require clamping
                return clamper

            # loop the entire array at once, even if it's multidimensional
            type_size = get_size_of_type(typ)
            i_incr = get_size_of_type(subtype) * 32
            mem_to = type_size * 32
            loop_label = f"_check_list_loop_{str(uuid.uuid4())}"

            lll_node = [
                ["mstore", offset, 0],  # init loop
                ["label", loop_label],
                clamper,
                ["mstore", offset, ["add", ["mload", offset], i_incr]],
                ["if", ["lt", ["mload", offset], mem_to],
                 ["goto", loop_label]],
            ]
        else:
            lll_node = []
            for i in range(typ.count):
                offset = get_size_of_type(typ.subtype) * 32 * i
                lll_node.append(
                    make_arg_clamper(datapos + offset, mempos + offset,
                                     typ.subtype, is_init))
        return LLLnode.from_list(["seq"] + lll_node,
                                 typ=None,
                                 annotation="checking list input")
    # Otherwise don't make any checks
    else:
        return LLLnode.from_list("pass")
def parse_private_function(code: ast.FunctionDef, sig: FunctionSignature,
                           context: Context) -> LLLnode:
    """
    Parse a private function (FuncDef), and produce full function body.

    :param sig: the FunctionSignature
    :param code: ast of function
    :param context: current compilation context
    :return: full sig compare & function body
    """
    validate_private_function(code, sig)

    # Get nonreentrant lock
    nonreentrant_pre, nonreentrant_post = get_nonreentrant_lock(
        sig, context.global_ctx)

    # Create callback_ptr, this stores a destination in the bytecode for a private
    # function to jump to after a function has executed.
    clampers: List[LLLnode] = []

    # Allocate variable space.
    context.memory_allocator.increase_memory(sig.max_copy_size)

    _post_callback_ptr = f"{sig.name}_{sig.method_id}_post_callback_ptr"
    context.callback_ptr = context.new_placeholder(typ=BaseType('uint256'))
    clampers.append(
        LLLnode.from_list(
            ['mstore', context.callback_ptr, 'pass'],
            annotation='pop callback pointer',
        ))
    if sig.total_default_args > 0:
        clampers.append(LLLnode.from_list(['label', _post_callback_ptr]))

    # private functions without return types need to jump back to
    # the calling function, as there is no return statement to handle the
    # jump.
    if sig.output_type is None:
        stop_func = [['jump', ['mload', context.callback_ptr]]]
    else:
        stop_func = [['stop']]

    # Generate copiers
    if len(sig.base_args) == 0:
        copier = ['pass']
        clampers.append(LLLnode.from_list(copier))
    elif sig.total_default_args == 0:
        copier = get_private_arg_copier(
            total_size=sig.base_copy_size,
            memory_dest=MemoryPositions.RESERVED_MEMORY)
        clampers.append(LLLnode.from_list(copier))

    # Fill variable positions
    for arg in sig.args:
        if isinstance(arg.typ, ByteArrayLike):
            # Byte arrays get fresh memory; only a pointer lives in the
            # reserved arg section.
            mem_pos, _ = context.memory_allocator.increase_memory(
                32 * get_size_of_type(arg.typ))
            context.vars[arg.name] = VariableRecord(arg.name, mem_pos,
                                                    arg.typ, False)
        else:
            context.vars[arg.name] = VariableRecord(
                arg.name,
                MemoryPositions.RESERVED_MEMORY + arg.pos,
                arg.typ,
                False,
            )

    # Private function copiers. No clamping for private functions.
    dyn_variable_names = [
        a.name for a in sig.base_args if isinstance(a.typ, ByteArrayLike)
    ]
    if dyn_variable_names:
        i_placeholder = context.new_placeholder(typ=BaseType('uint256'))
        unpackers: List[Any] = []
        for idx, var_name in enumerate(dyn_variable_names):
            var = context.vars[var_name]
            ident = f"_load_args_{sig.method_id}_dynarg{idx}"
            o = make_unpacker(ident=ident,
                              i_placeholder=i_placeholder,
                              begin_pos=var.pos)
            unpackers.append(o)

        if not unpackers:
            unpackers = ['pass']

        # 0 added to complete full overarching 'seq' statement, see private_label.
        unpackers.append(0)

        clampers.append(
            LLLnode.from_list(
                ['seq_unchecked'] + unpackers,
                typ=None,
                annotation='dynamic unpacker',
                pos=getpos(code),
            ))

    # Function has default arguments.
    if sig.total_default_args > 0:
        # Function with default parameters.
        default_sigs = sig_utils.generate_default_arg_sigs(
            code, context.sigs, context.global_ctx)
        sig_chain: List[Any] = ['seq']

        for default_sig in default_sigs:
            sig_compare, private_label = get_sig_statements(
                default_sig, getpos(code))

            # Populate unset default variables
            set_defaults = []
            for arg_name in get_default_names_to_set(sig, default_sig):
                value = Expr(sig.default_values[arg_name], context).lll_node
                var = context.vars[arg_name]
                left = LLLnode.from_list(var.pos,
                                         typ=var.typ,
                                         location='memory',
                                         pos=getpos(code),
                                         mutable=var.mutable)
                set_defaults.append(
                    make_setter(left, value, 'memory', pos=getpos(code)))

            current_sig_arg_names = [x.name for x in default_sig.args]

            # Load all variables in default section, if private,
            # because the stack is a linear pipe.
            copier_arg_count = len(default_sig.args)
            copier_arg_names = current_sig_arg_names

            # Order copier_arg_names, this is very important.
            copier_arg_names = [
                x.name for x in default_sig.args if x.name in copier_arg_names
            ]

            # Variables to be populated from calldata/stack.
            default_copiers: List[Any] = []
            if copier_arg_count > 0:
                # Get map of variables in calldata, with their offsets
                offset = 4
                calldata_offset_map = {}
                for arg in default_sig.args:
                    calldata_offset_map[arg.name] = offset
                    offset += (32 if isinstance(arg.typ, ByteArrayLike) else
                               get_size_of_type(arg.typ) * 32)

                # Copy set default parameters from calldata
                dynamics = []
                for arg_name in copier_arg_names:
                    var = context.vars[arg_name]
                    if isinstance(var.typ, ByteArrayLike):
                        _size = 32
                        dynamics.append(var.pos)
                    else:
                        _size = var.size * 32
                    default_copiers.append(
                        get_private_arg_copier(
                            memory_dest=var.pos,
                            total_size=_size,
                        ))

                # Unpack byte array if necessary.
                if dynamics:
                    i_placeholder = context.new_placeholder(
                        typ=BaseType('uint256'))
                    for idx, var_pos in enumerate(dynamics):
                        ident = f'unpack_default_sig_dyn_{default_sig.method_id}_arg{idx}'
                        default_copiers.append(
                            make_unpacker(
                                ident=ident,
                                i_placeholder=i_placeholder,
                                begin_pos=var_pos,
                            ))

                default_copiers.append(0)  # for over arching seq, POP

            sig_chain.append([
                'if', sig_compare,
                [
                    'seq',
                    private_label,
                    LLLnode.from_list([
                        'mstore',
                        context.callback_ptr,
                        'pass',
                    ],
                                      annotation='pop callback pointer',
                                      pos=getpos(code)),
                    ['seq'] + set_defaults if set_defaults else ['pass'],
                    ['seq_unchecked'] + default_copiers
                    if default_copiers else ['pass'],
                    ['goto', _post_callback_ptr]
                ]
            ])

        # With private functions all variable loading occurs in the default
        # function sub routine.
        _clampers = [['label', _post_callback_ptr]]

        # Function with default parameters.
        o = LLLnode.from_list(
            [
                'seq',
                sig_chain,
                [
                    'if', 0,  # can only be jumped into
                    [
                        'seq',
                        ['seq'] + nonreentrant_pre + _clampers +
                        [parse_body(c, context)
                         for c in code.body] + nonreentrant_post + stop_func
                    ],
                ],
            ],
            typ=None, pos=getpos(code))
    else:
        # Function without default parameters.
        sig_compare, private_label = get_sig_statements(sig, getpos(code))
        o = LLLnode.from_list([
            'if', sig_compare,
            ['seq'] + [private_label] + nonreentrant_pre + clampers +
            [parse_body(c, context)
             for c in code.body] + nonreentrant_post + stop_func
        ], typ=None, pos=getpos(code))
    # Single exit point; the original had a redundant, unreachable duplicate
    # `return o` which has been removed.
    return o
def make_arg_clamper(datapos, mempos, typ, is_init=False):
    """
    Clamps argument to type limits.

    :param datapos: calldata (or code, for init) offset of the value
    :param mempos: memory offset the value is stored at during clamping
    :param typ: Vyper type of the value
    :param is_init: True when generating init (constructor) bytecode
    :return: LLLnode performing the check, or a 'pass' node if none is needed
    """
    if not is_init:
        data_decl = ['calldataload', ['add', 4, datapos]]
        copier = functools.partial(_mk_calldatacopy_copier, mempos=mempos)
    else:
        data_decl = ['codeload', ['add', '~codelen', datapos]]
        copier = functools.partial(_mk_codecopy_copier, mempos=mempos)
    # Numbers: make sure they're in range
    if is_base_type(typ, 'int128'):
        return LLLnode.from_list([
            'clamp', ['mload', MemoryPositions.MINNUM], data_decl,
            ['mload', MemoryPositions.MAXNUM]
        ], typ=typ, annotation='checking int128 input')
    # Booleans: make sure they're zero or one
    elif is_base_type(typ, 'bool'):
        return LLLnode.from_list(
            ['uclamplt', data_decl, 2],
            typ=typ,
            annotation='checking bool input',
        )
    # Addresses: make sure they're in range
    elif is_base_type(typ, 'address'):
        return LLLnode.from_list(
            ['uclamplt', data_decl, ['mload', MemoryPositions.ADDRSIZE]],
            typ=typ,
            annotation='checking address input',
        )
    # Bytes: make sure they have the right size
    elif isinstance(typ, ByteArrayLike):
        return LLLnode.from_list([
            'seq',
            copier(data_decl, 32 + typ.maxlen),
            [
                'assert',
                ['le', ['calldataload', ['add', 4, data_decl]], typ.maxlen]
            ]
        ], typ=None, annotation='checking bytearray input')
    # Lists: recurse
    elif isinstance(typ, ListType):
        # Emit a runtime loop instead of unrolling when the list is large
        # (count > 5) or when the offsets are runtime LLL expressions.
        if typ.count > 5 or (type(datapos) is list and type(mempos) is list):
            subtype_size = get_size_of_type(typ.subtype)
            i_incr = subtype_size * 32
            mem_to = subtype_size * 32 * (typ.count - 1)
            loop_label = "_check_list_loop_%s" % str(uuid.uuid4())

            # use MemoryPositions.FREE_LOOP_INDEX to store i
            # (named constant instead of the former magic number 288)
            offset = MemoryPositions.FREE_LOOP_INDEX

            o = [
                ['mstore', offset, 0],  # init loop
                ['label', loop_label],
                make_arg_clamper(['add', datapos, ['mload', offset]],
                                 ['add', mempos, ['mload', offset]],
                                 typ.subtype, is_init),
                ['mstore', offset, ['add', ['mload', offset], i_incr]],
                ['if', ['lt', ['mload', offset], mem_to],
                 ['goto', loop_label]]
            ]
        else:
            o = []
            for i in range(typ.count):
                offset = get_size_of_type(typ.subtype) * 32 * i
                o.append(
                    make_arg_clamper(datapos + offset, mempos + offset,
                                     typ.subtype, is_init))
        return LLLnode.from_list(['seq'] + o,
                                 typ=None,
                                 annotation='checking list input')
    # Otherwise don't make any checks
    else:
        return LLLnode.from_list('pass')
def parse_external_function(
    code: vy_ast.FunctionDef,
    sig: FunctionSignature,
    context: Context,
    check_nonpayable: bool,
) -> LLLnode:
    """
    Parse an external function (FuncDef), and produce full function body.

    :param sig: the FunctionSignature
    :param code: ast of function
    :param context: current compilation context
    :param check_nonpayable: if True, include a check that `msg.value == 0`
        at the beginning of the function
    :return: full sig compare & function body
    """
    func_type = code._metadata["type"]

    # Get nonreentrant lock
    nonreentrant_pre, nonreentrant_post = get_nonreentrant_lock(
        func_type, context.global_ctx)

    clampers = []

    # Generate copiers
    copier: List[Any] = ["pass"]
    if not len(sig.base_args):
        copier = ["pass"]
    elif sig.name == "__init__":
        # Constructor args are appended to the code section, not calldata.
        copier = [
            "codecopy", MemoryPositions.RESERVED_MEMORY, "~codelen",
            sig.base_copy_size
        ]
        context.memory_allocator.expand_memory(sig.max_copy_size)
    clampers.append(copier)

    if check_nonpayable and sig.mutability != "payable":
        # if the contract contains payable functions, but this is not one of them
        # add an assertion that the value of the call is zero
        clampers.append(["assert", ["iszero", "callvalue"]])

    # Fill variable positions
    default_args_start_pos = len(sig.base_args)
    for i, arg in enumerate(sig.args):
        if i < len(sig.base_args):
            # Only non-default (base) args get a clamper here; default args
            # are clamped in the default-sig chain below.
            clampers.append(
                make_arg_clamper(
                    arg.pos,
                    context.memory_allocator.get_next_memory_position(),
                    arg.typ,
                    sig.name == "__init__",
                ))
        if isinstance(arg.typ, ByteArrayLike):
            mem_pos = context.memory_allocator.expand_memory(
                32 * get_size_of_type(arg.typ))
            context.vars[arg.name] = VariableRecord(arg.name, mem_pos,
                                                    arg.typ, False)
        else:
            if sig.name == "__init__":
                context.vars[arg.name] = VariableRecord(
                    arg.name,
                    MemoryPositions.RESERVED_MEMORY + arg.pos,
                    arg.typ,
                    False,
                )
            elif i >= default_args_start_pos:
                # default args need to be allocated in memory.
                type_size = get_size_of_type(arg.typ) * 32
                default_arg_pos = context.memory_allocator.expand_memory(
                    type_size)
                context.vars[arg.name] = VariableRecord(
                    name=arg.name,
                    pos=default_arg_pos,
                    typ=arg.typ,
                    mutable=False,
                )
            else:
                context.vars[arg.name] = VariableRecord(name=arg.name,
                                                        pos=4 + arg.pos,
                                                        typ=arg.typ,
                                                        mutable=False,
                                                        location="calldata")

    # Create "clampers" (input well-formedness checkers)
    # Return function body
    if sig.name == "__init__":
        o = LLLnode.from_list(
            ["seq"] + clampers + [parse_body(code.body, context)],  # type: ignore
            pos=getpos(code),
        )
    # Is default function.
    elif sig.is_default_func():
        o = LLLnode.from_list(
            ["seq"] + clampers + [parse_body(code.body, context)] +
            [["stop"]],  # type: ignore
            pos=getpos(code),
        )
    # Is a normal function.
    else:
        # Function with default parameters.
        if sig.total_default_args > 0:
            function_routine = f"{sig.name}_{sig.method_id}"
            default_sigs = sig_utils.generate_default_arg_sigs(
                code, context.sigs, context.global_ctx)
            sig_chain: List[Any] = ["seq"]

            for default_sig in default_sigs:
                sig_compare, _ = get_sig_statements(default_sig, getpos(code))

                # Populate unset default variables
                set_defaults = []
                for arg_name in get_default_names_to_set(sig, default_sig):
                    value = Expr(sig.default_values[arg_name],
                                 context).lll_node
                    var = context.vars[arg_name]
                    left = LLLnode.from_list(
                        var.pos,
                        typ=var.typ,
                        location="memory",
                        pos=getpos(code),
                        mutable=var.mutable,
                    )
                    set_defaults.append(
                        make_setter(left, value, "memory", pos=getpos(code)))

                current_sig_arg_names = {x.name for x in default_sig.args}
                base_arg_names = {arg.name for arg in sig.base_args}
                copier_arg_count = len(default_sig.args) - len(sig.base_args)
                copier_arg_names = list(current_sig_arg_names -
                                        base_arg_names)

                # Order copier_arg_names, this is very important.
                copier_arg_names = [
                    x.name for x in default_sig.args
                    if x.name in copier_arg_names
                ]

                # Variables to be populated from calldata/stack.
                default_copiers: List[Any] = []
                if copier_arg_count > 0:
                    # Get map of variables in calldata, with their offsets
                    offset = 4
                    calldata_offset_map = {}
                    for arg in default_sig.args:
                        calldata_offset_map[arg.name] = offset
                        offset += (32 if isinstance(arg.typ, ByteArrayLike)
                                   else get_size_of_type(arg.typ) * 32)

                    # Copy default parameters from calldata.
                    for arg_name in copier_arg_names:
                        var = context.vars[arg_name]
                        calldata_offset = calldata_offset_map[arg_name]

                        # Add clampers.
                        default_copiers.append(
                            make_arg_clamper(
                                calldata_offset - 4,
                                var.pos,
                                var.typ,
                            ))
                        # Add copying code.
                        _offset: Union[int, List[Any]] = calldata_offset
                        if isinstance(var.typ, ByteArrayLike):
                            # Byte arrays are referenced indirectly: follow
                            # the head pointer stored in calldata.
                            _offset = [
                                "add", 4, ["calldataload", calldata_offset]
                            ]
                        default_copiers.append(
                            get_external_arg_copier(
                                memory_dest=var.pos,
                                total_size=var.size * 32,
                                offset=_offset,
                            ))

                    default_copiers.append(0)  # for over arching seq, POP

                sig_chain.append([
                    "if", sig_compare,
                    [
                        "seq",
                        ["seq"] + set_defaults if set_defaults else ["pass"],
                        ["seq_unchecked"] + default_copiers
                        if default_copiers else ["pass"],
                        ["goto", function_routine],
                    ],
                ])

            # Function with default parameters.
            function_jump_label = f"{sig.name}_{sig.method_id}_skip"
            o = LLLnode.from_list(
                [
                    "seq",
                    sig_chain,
                    [
                        "seq",
                        ["goto", function_jump_label],
                        ["label", function_routine],
                        ["seq"] + nonreentrant_pre + clampers +
                        [parse_body(c, context) for c in code.body] +
                        nonreentrant_post + [["stop"]],
                        ["label", function_jump_label],
                    ],
                ],
                typ=None,
                pos=getpos(code),
            )
        else:
            # Function without default parameters.
            sig_compare, _ = get_sig_statements(sig, getpos(code))
            o = LLLnode.from_list(
                [
                    "if", sig_compare,
                    ["seq"] + nonreentrant_pre + clampers +
                    [parse_body(c, context) for c in code.body] +
                    nonreentrant_post + [["stop"]],
                ],
                typ=None,
                pos=getpos(code),
            )
    return o
def parse_public_function(code: ast.FunctionDef, sig: FunctionSignature,
                          context: Context) -> LLLnode:
    """
    Parse a public function (FuncDef), and produce full function body.

    :param sig: the FunctionSignature
    :param code: ast of function
    :param context: current compilation context
    :return: full sig compare & function body
    """
    validate_public_function(code, sig, context.global_ctx)

    # Get nonreentrant lock
    nonreentrant_pre, nonreentrant_post = get_nonreentrant_lock(
        sig, context.global_ctx)

    clampers = []

    # Generate copiers
    copier: List[Any] = ['pass']
    if not len(sig.base_args):
        copier = ['pass']
    elif sig.name == '__init__':
        # Constructor args are appended to the code section, not calldata.
        copier = [
            'codecopy', MemoryPositions.RESERVED_MEMORY, '~codelen',
            sig.base_copy_size
        ]
        context.memory_allocator.increase_memory(sig.max_copy_size)
    clampers.append(copier)

    # Add asserts for payable and internal
    if not sig.payable:
        clampers.append(['assert', ['iszero', 'callvalue']])

    # Fill variable positions
    default_args_start_pos = len(sig.base_args)
    for i, arg in enumerate(sig.args):
        if i < len(sig.base_args):
            # Only non-default (base) args get a clamper here; default args
            # are clamped inside the default-sig chain below.
            clampers.append(
                make_arg_clamper(
                    arg.pos,
                    context.memory_allocator.get_next_memory_position(),
                    arg.typ,
                    sig.name == '__init__',
                ))
        if isinstance(arg.typ, ByteArrayLike):
            mem_pos, _ = context.memory_allocator.increase_memory(
                32 * get_size_of_type(arg.typ))
            context.vars[arg.name] = VariableRecord(arg.name, mem_pos,
                                                    arg.typ, False)
        else:
            if sig.name == '__init__':
                context.vars[arg.name] = VariableRecord(
                    arg.name,
                    MemoryPositions.RESERVED_MEMORY + arg.pos,
                    arg.typ,
                    False,
                )
            elif i >= default_args_start_pos:
                # default args need to be allocated in memory.
                default_arg_pos, _ = context.memory_allocator.increase_memory(
                    32)
                context.vars[arg.name] = VariableRecord(
                    name=arg.name,
                    pos=default_arg_pos,
                    typ=arg.typ,
                    mutable=False,
                )
            else:
                context.vars[arg.name] = VariableRecord(name=arg.name,
                                                        pos=4 + arg.pos,
                                                        typ=arg.typ,
                                                        mutable=False,
                                                        location='calldata')

    # Create "clampers" (input well-formedness checkers)
    # Return function body
    if sig.name == '__init__':
        o = LLLnode.from_list(
            ['seq'] + clampers + [parse_body(code.body, context)],  # type: ignore
            pos=getpos(code),
        )
    # Is default function.
    elif sig.is_default_func():
        if len(sig.args) > 0:
            raise FunctionDeclarationException(
                'Default function may not receive any arguments.', code)
        o = LLLnode.from_list(
            ['seq'] + clampers + [parse_body(code.body, context)],  # type: ignore
            pos=getpos(code),
        )
    # Is a normal function.
    else:
        # Function with default parameters.
        if sig.total_default_args > 0:
            function_routine = f"{sig.name}_{sig.method_id}"
            default_sigs = sig_utils.generate_default_arg_sigs(
                code, context.sigs, context.global_ctx)
            sig_chain: List[Any] = ['seq']

            for default_sig in default_sigs:
                sig_compare, _ = get_sig_statements(default_sig, getpos(code))

                # Populate unset default variables
                set_defaults = []
                for arg_name in get_default_names_to_set(sig, default_sig):
                    value = Expr(sig.default_values[arg_name],
                                 context).lll_node
                    var = context.vars[arg_name]
                    left = LLLnode.from_list(var.pos,
                                             typ=var.typ,
                                             location='memory',
                                             pos=getpos(code),
                                             mutable=var.mutable)
                    set_defaults.append(
                        make_setter(left, value, 'memory', pos=getpos(code)))

                current_sig_arg_names = {x.name for x in default_sig.args}
                base_arg_names = {arg.name for arg in sig.base_args}
                copier_arg_count = len(default_sig.args) - len(sig.base_args)
                copier_arg_names = list(current_sig_arg_names -
                                        base_arg_names)

                # Order copier_arg_names, this is very important.
                copier_arg_names = [
                    x.name for x in default_sig.args
                    if x.name in copier_arg_names
                ]

                # Variables to be populated from calldata/stack.
                default_copiers: List[Any] = []
                if copier_arg_count > 0:
                    # Get map of variables in calldata, with their offsets
                    offset = 4
                    calldata_offset_map = {}
                    for arg in default_sig.args:
                        calldata_offset_map[arg.name] = offset
                        offset += (32 if isinstance(arg.typ, ByteArrayLike)
                                   else get_size_of_type(arg.typ) * 32)

                    # Copy default parameters from calldata.
                    for arg_name in copier_arg_names:
                        var = context.vars[arg_name]
                        calldata_offset = calldata_offset_map[arg_name]

                        # Add clampers.
                        default_copiers.append(
                            make_arg_clamper(
                                calldata_offset - 4,
                                var.pos,
                                var.typ,
                            ))
                        # Add copying code.
                        _offset: Union[int, List[Any]] = calldata_offset
                        if isinstance(var.typ, ByteArrayLike):
                            # Byte arrays are referenced indirectly: follow
                            # the head pointer stored in calldata.
                            _offset = [
                                'add', 4, ['calldataload', calldata_offset]
                            ]
                        default_copiers.append(
                            get_public_arg_copier(
                                memory_dest=var.pos,
                                total_size=var.size * 32,
                                offset=_offset,
                            ))

                    default_copiers.append(0)  # for over arching seq, POP

                sig_chain.append([
                    'if', sig_compare,
                    [
                        'seq',
                        ['seq'] + set_defaults if set_defaults else ['pass'],
                        ['seq_unchecked'] + default_copiers
                        if default_copiers else ['pass'],
                        ['goto', function_routine]
                    ]
                ])

            # Function with default parameters.
            o = LLLnode.from_list(
                [
                    'seq',
                    sig_chain,
                    [
                        'if', 0,  # can only be jumped into
                        [
                            'seq',
                            ['label', function_routine],
                            ['seq'] + nonreentrant_pre + clampers +
                            [parse_body(c, context) for c in code.body] +
                            nonreentrant_post + [['stop']]
                        ],
                    ],
                ], typ=None, pos=getpos(code))
        else:
            # Function without default parameters.
            sig_compare, _ = get_sig_statements(sig, getpos(code))
            o = LLLnode.from_list([
                'if', sig_compare,
                ['seq'] + nonreentrant_pre + clampers +
                [parse_body(c, context) for c in code.body] +
                nonreentrant_post + [['stop']]
            ], typ=None, pos=getpos(code))
    return o
def pack_logging_topics(event_id, args, expected_topics, context, pos):
    """
    Build the list of topic values/LLL expressions for a log statement.

    :param event_id: precomputed event id (first topic)
    :param args: AST nodes for the indexed arguments being logged
    :param expected_topics: event members declared as indexed, in order
    :param context: current compilation context
    :param pos: source position used for error reporting / conversions
    :return: list of topics — ints (compile-time) or LLL expressions
    :raises TypeMismatch: when an arg's type does not match the event member

    Note: the original loop reused the name ``pos`` as its index, shadowing
    the parameter and requiring a ``code_pos`` alias; the loop index is now
    ``i`` and ``pos`` is used directly.
    """
    topics = [event_id]

    for i, expected_topic in enumerate(expected_topics):
        expected_type = expected_topic.typ
        arg = args[i]
        value = Expr(arg, context).lll_node
        arg_type = value.typ

        if isinstance(arg_type, ByteArrayLike) and isinstance(
                expected_type, ByteArrayLike):
            if arg_type.maxlen > expected_type.maxlen:
                raise TypeMismatch(
                    f"Topic input bytes are too big: {arg_type} {expected_type}",
                    pos
                )

            if isinstance(arg, (vy_ast.Str, vy_ast.Bytes)):
                # for literals, generate the topic at compile time
                value = arg.value
                if isinstance(value, str):
                    value = value.encode()
                topics.append(bytes_to_int(keccak256(value)))

            elif value.location == "memory":
                topics.append(["sha3", ["add", value, 32], ["mload", value]])

            else:
                # storage or calldata: copy into memory first, then hash
                placeholder = context.new_internal_variable(value.typ)
                placeholder_node = LLLnode.from_list(placeholder,
                                                     typ=value.typ,
                                                     location="memory")
                copier = make_byte_array_copier(
                    placeholder_node,
                    LLLnode.from_list("_sub",
                                      typ=value.typ,
                                      location=value.location),
                )
                lll_node = [
                    "with", "_sub", value,
                    ["seq", copier,
                     ["sha3", ["add", placeholder, 32],
                      ["mload", placeholder]]],
                ]
                topics.append(lll_node)

        elif isinstance(arg_type, ListType) and isinstance(
                expected_type, ListType):
            size = get_size_of_type(value.typ) * 32
            if value.location == "memory":
                topics.append(["sha3", value, size])
            else:
                # storage or calldata: copy into memory first, then hash
                placeholder = context.new_internal_variable(value.typ)
                placeholder_node = LLLnode.from_list(placeholder,
                                                     typ=value.typ,
                                                     location="memory")
                setter = make_setter(placeholder_node, value, "memory",
                                     value.pos)
                lll_node = ["seq", setter, ["sha3", placeholder, size]]
                topics.append(lll_node)

        else:
            if arg_type != expected_type:
                raise TypeMismatch(
                    f"Invalid type for logging topic, got {arg_type} expected {expected_type}",
                    value.pos,
                )
            value = unwrap_location(value)
            value = base_type_conversion(value, arg_type, expected_type,
                                         pos=pos)
            topics.append(value)

    return topics
def make_arg_clamper(datapos, mempos, typ, is_init=False):
    """
    Clamps argument to type limits.

    :param datapos: calldata (or code, for init) offset of the value
    :param mempos: memory offset the value is stored at during clamping
    :param typ: Vyper type of the value
    :param is_init: True when generating init (constructor) bytecode
    :return: LLLnode performing the check, or a 'pass' node if none is needed
    """
    if not is_init:
        data_decl = ["calldataload", ["add", 4, datapos]]
        copier = functools.partial(_mk_calldatacopy_copier, mempos=mempos)
    else:
        data_decl = ["codeload", ["add", "~codelen", datapos]]
        copier = functools.partial(_mk_codecopy_copier, mempos=mempos)
    # Numbers: make sure they're in range
    if is_base_type(typ, "int128"):
        return LLLnode.from_list(
            [
                "clamp",
                ["mload", MemoryPositions.MINNUM],
                data_decl,
                ["mload", MemoryPositions.MAXNUM],
            ],
            typ=typ,
            annotation="checking int128 input",
        )
    # Booleans: make sure they're zero or one
    elif is_base_type(typ, "bool"):
        return LLLnode.from_list(
            ["uclamplt", data_decl, 2],
            typ=typ,
            annotation="checking bool input",
        )
    # Addresses: make sure they're in range
    elif is_base_type(typ, "address"):
        return LLLnode.from_list(
            ["uclamplt", data_decl, ["mload", MemoryPositions.ADDRSIZE]],
            typ=typ,
            annotation="checking address input",
        )
    # Bytes: make sure they have the right size
    elif isinstance(typ, ByteArrayLike):
        return LLLnode.from_list(
            [
                "seq",
                copier(data_decl, 32 + typ.maxlen),
                [
                    "assert",
                    [
                        "le",
                        ["calldataload", ["add", 4, data_decl]],
                        typ.maxlen
                    ]
                ],
            ],
            typ=None,
            annotation="checking bytearray input",
        )
    # Lists: recurse
    elif isinstance(typ, ListType):
        # Emit a runtime loop instead of unrolling when the list is large
        # (count > 5) or when the offsets are runtime LLL expressions.
        if typ.count > 5 or (type(datapos) is list and type(mempos) is list):
            subtype_size = get_size_of_type(typ.subtype)
            i_incr = subtype_size * 32
            mem_to = subtype_size * 32 * (typ.count - 1)
            loop_label = f"_check_list_loop_{str(uuid.uuid4())}"
            # use MemoryPositions.FREE_LOOP_INDEX to store the loop counter
            # (named constant instead of the former magic number 288)
            offset = MemoryPositions.FREE_LOOP_INDEX
            o = [
                ["mstore", offset, 0],  # init loop
                ["label", loop_label],
                make_arg_clamper(
                    ["add", datapos, ["mload", offset]],
                    ["add", mempos, ["mload", offset]],
                    typ.subtype,
                    is_init,
                ),
                ["mstore", offset, ["add", ["mload", offset], i_incr]],
                ["if", ["lt", ["mload", offset], mem_to],
                 ["goto", loop_label]],
            ]
        else:
            o = []
            for i in range(typ.count):
                offset = get_size_of_type(typ.subtype) * 32 * i
                o.append(
                    make_arg_clamper(datapos + offset, mempos + offset,
                                     typ.subtype, is_init))
        return LLLnode.from_list(["seq"] + o,
                                 typ=None,
                                 annotation="checking list input")
    # Otherwise don't make any checks
    else:
        return LLLnode.from_list("pass")