def test_serialize(name: str, typ: Type[View], value: View,
                   serialized: str, root: str, obj: ObjType):
    stream = io.BytesIO()
    length = value.serialize(stream)
    stream.seek(0)
    encoded = stream.read()
    assert encoded.hex() == serialized
    assert length * 2 == len(serialized)  # hex string: two characters per serialized byte
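# A hedged sketch of one (name, typ, value, serialized, root, obj) case the parametrized
# tests in this module consume. The remerkleable import path and the plain-int obj
# representation are assumptions; the little-endian encoding "6745" and the zero-padded
# single-chunk root follow from the SSZ rules for a uint16.
from remerkleable.basic import uint16

example_case = (
    "uint16 0x4567",      # name
    uint16,               # typ
    uint16(0x4567),       # value
    "6745",               # serialized: little-endian hex
    "6745" + "00" * 30,   # root: the value packed into a single 32-byte chunk
    0x4567,               # obj: plain-Python representation (assumption)
)
# usage: test_serialize(*example_case)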
def set(self, i: int, v: View) -> None:
    elem_type: Type[View] = self.item_elem_cls(i)
    # if not the right type, try to coerce it
    if not isinstance(v, elem_type):
        v = elem_type.coerce_view(v)
    if self.is_packed():
        # basic types are more complicated: we operate on a subsection of a bottom chunk
        if isinstance(v, BasicView):
            elems_per_chunk = 32 // v.type_byte_length()
            chunk_i = i // elems_per_chunk
            target = to_gindex(chunk_i, self.tree_depth())
            chunk_setter_link: Link = self.get_backing().setter(target)
            chunk = self.get_backing().getter(target)
            new_chunk = v.backing_from_base(chunk, i % elems_per_chunk)
            self.set_backing(chunk_setter_link(new_chunk))
        else:
            raise Exception("cannot pack subtree elements that are not basic types")
    else:
        setter_link: Link = self.get_backing().setter(to_gindex(i, self.tree_depth()))
        self.set_backing(setter_link(v.get_backing()))
def append(self, v: View):
    ll = self.length()
    if ll >= self.__class__.limit():
        raise Exception("list is at maximum capacity, cannot append")
    i = ll
    elem_type: Type[View] = self.__class__.element_cls()
    if not isinstance(v, elem_type):
        v = elem_type.coerce_view(v)
    target: Gindex
    if self.__class__.is_packed():
        next_backing = self.get_backing()
        if isinstance(v, BasicView):
            elems_per_chunk = 32 // elem_type.type_byte_length()
            chunk_i = i // elems_per_chunk
            target = to_gindex(chunk_i, self.__class__.tree_depth())
            chunk: Node
            # the first element of a new chunk starts from a fresh zero chunk,
            # later elements are packed into the existing bottom chunk
            if i % elems_per_chunk == 0:
                set_last = next_backing.setter(target, expand=True)
                chunk = zero_node(0)
            else:
                set_last = next_backing.setter(target)
                chunk = next_backing.getter(target)
            chunk = v.backing_from_base(chunk, i % elems_per_chunk)
            next_backing = set_last(chunk)
        else:
            raise Exception("cannot append a packed element that is not a basic type")
    else:
        target = to_gindex(i, self.__class__.tree_depth())
        set_last = self.get_backing().setter(target, expand=True)
        next_backing = set_last(v.get_backing())
    # update the length mix-in (right child of the root) to the new list length
    set_length = next_backing.rebind_right
    new_length = uint256(ll + 1).get_backing()
    next_backing = set_length(new_length)
    self.set_backing(next_backing)
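# A minimal usage sketch for the packed set()/append() paths above, assuming the
# remerkleable List and uint8 types (the import paths are an assumption). uint8
# elements are packed 32 per 32-byte bottom chunk, and __setitem__/append route
# through set()/append().
from remerkleable.basic import uint8
from remerkleable.complex import List

def _packed_list_sketch() -> None:
    v = List[uint8, 64]()       # empty list with a limit of 64 elements
    for b in range(5):
        v.append(uint8(b))      # expands the backing tree and bumps the length mix-in
    v[2] = uint8(0xff)          # rewrites one byte inside the bottom chunk
    assert len(v) == 5
    assert v.encode_bytes().hex() == "0001ff0304"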
def test_readonly_iters(name: str, typ: Type[View], value: View,
                        serialized: str, root: str, obj: ObjType):
    if hasattr(value, 'readonly_iter'):
        r_iter = value.readonly_iter()
        i = 0
        for expected_elem in iter(value):
            got_elem = r_iter.__next__()
            assert expected_elem == got_elem
            i += 1
        # the read-only iterator must be exhausted at the same point as the regular one
        try:
            r_iter.__next__()
            assert False
        except StopIteration:
            pass
    if isinstance(value, Container):
        fields = list(value)
        expected = [getattr(value, fkey) for fkey in value.__class__.fields().keys()]
        assert fields == expected
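# Illustrative sketch of the Container branch above, assuming remerkleable import paths
# and a hypothetical Pair type: iterating a Container yields its field values in
# declaration order, which is exactly what test_readonly_iters asserts.
from remerkleable.basic import uint8, uint16
from remerkleable.complex import Container

class Pair(Container):
    a: uint8
    b: uint16

def _container_iter_sketch() -> None:
    p = Pair(a=1, b=2)
    assert list(p) == [p.a, p.b]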
def serialize(obj: View) -> bytes:
    return obj.encode_bytes()
def hash_tree_root(obj: View) -> Bytes32:
    return Bytes32(obj.get_backing().merkle_root())
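# A small sketch of the two helpers above on a container, assuming remerkleable types
# (the import paths and the Point type are assumptions): serialize() yields the SSZ
# byte string, hash_tree_root() the 32-byte Merkle root of the value's backing tree.
from remerkleable.basic import uint32
from remerkleable.complex import Container

class Point(Container):
    x: uint32
    y: uint32

def _helpers_sketch() -> None:
    p = Point(x=1, y=2)
    assert serialize(p).hex() == "0100000002000000"
    assert len(hash_tree_root(p)) == 32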
def test_hash_tree_root(name: str, typ: Type[View], value: View,
                        serialized: str, root: str, obj: ObjType):
    assert value.hash_tree_root().hex() == root
def test_encode_bytes(name: str, typ: Type[View], value: View,
                      serialized: str, root: str, obj: ObjType):
    encoded = value.encode_bytes()
    assert encoded.hex() == serialized
def test_value_byte_length(name: str, typ: Type[View], value: View,
                           serialized: str, root: str, obj: ObjType):
    assert value.value_byte_length() == len(bytes.fromhex(serialized))
def test_json_load(name: str, typ: Type[View], value: View,
                   serialized: str, root: str, obj: ObjType):
    # Bigger round trip: check if a json-like obj can be parsed correctly.
    assert value.from_obj(json.loads(json.dumps(obj))).to_obj() == obj
def test_json_dump(name: str, typ: Type[View], value: View,
                   serialized: str, root: str, obj: ObjType):
    assert json.dumps(value.to_obj()) == json.dumps(obj)
def test_to_obj(name: str, typ: Type[View], value: View,
                serialized: str, root: str, obj: ObjType):
    assert value.to_obj() == obj
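# A hedged round-trip sketch tying the obj/json tests together, assuming the
# remerkleable List and uint64 types (import paths are an assumption): to_obj()
# produces a JSON-serializable structure and from_obj() parses it back into a view
# with the same root.
from remerkleable.basic import uint64
from remerkleable.complex import List

def _json_roundtrip_sketch() -> None:
    v = List[uint64, 16](3, 5, 8)
    obj = json.loads(json.dumps(v.to_obj()))
    restored = List[uint64, 16].from_obj(obj)
    assert restored.to_obj() == v.to_obj()
    assert restored.hash_tree_root() == v.hash_tree_root()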