def test_length(self):
    """len() of a writer equals the number of bytes written so far."""
    with serialization.BinaryWriter() as writer:
        writer.write_uint16(1000)
        self.assertEqual(2, len(writer))
    with serialization.BinaryWriter() as writer:
        writer.write_uint64(1000)
        self.assertEqual(8, len(writer))
def test_serialize_to_stream(self):
    """Writing a serializable object emits exactly its raw byte payload."""
    payload_160 = bytearray([0x11] * 20)
    payload_256 = bytearray([0x11] * 32)
    with serialization.BinaryWriter() as writer:
        writer.write_serializable(UInt160(payload_160))
        self.assertEqual(payload_160, writer._stream.getvalue())
    with serialization.BinaryWriter() as writer:
        writer.write_serializable(UInt256(payload_256))
        self.assertEqual(payload_256, writer._stream.getvalue())
def designate_as_role(self,
                      engine: contracts.ApplicationEngine,
                      role: DesignateRole,
                      nodes: List[cryptography.ECPoint]) -> None:
    """
    Persist `nodes` as the designated node list for `role`, effective from
    the block after the currently persisting one, and emit a "Designation"
    notification.

    Args:
        engine: the engine executing the native contract call.
        role: the designation role the nodes are assigned to.
        nodes: public keys of the designated nodes (1..32 entries).

    Raises:
        ValueError: if `nodes` is empty or larger than 32 entries, if the
            committee witness check fails, or if no block is being persisted.
    """
    if len(nodes) == 0:
        raise ValueError(
            "[DesignateContract] Cannot designate empty nodes list")
    if len(nodes) > 32:
        raise ValueError(
            "[DesignateContract] Cannot designate a nodes list larger than 32"
        )
    if not self._check_committee(engine):
        raise ValueError("[DesignateContract] check committee failed")
    if engine.snapshot.persisting_block is None:
        # Fix: previously raised a bare ValueError with no message, unlike
        # every other failure path in this method.
        raise ValueError(
            "[DesignateContract] Cannot designate without a persisting block")
    # NOTE(review): sorts the caller's list in place — presumably intended
    # for canonical storage ordering.
    nodes.sort()
    # Designation takes effect in the next block.
    index = engine.snapshot.persisting_block.index + 1
    storage_key = self.create_key(
        role.to_bytes(1, 'little') + self._to_uint32(index))
    with serialization.BinaryWriter() as writer:
        writer.write_serializable_list(nodes)
        storage_item = storage.StorageItem(writer.to_array())
    engine.snapshot.storages.update(storage_key, storage_item)
    state = vm.ArrayStackItem(engine.reference_counter)
    state.append(vm.IntegerStackItem(role.value))
    state.append(
        vm.IntegerStackItem(engine.snapshot.persisting_block.index))
    msgrouter.interop_notify(self.hash, "Designation", state)
def hash(self) -> types.UInt256:
    """Return the double-SHA256 digest of the unsigned serialized form."""
    with serialization.BinaryWriter() as writer:
        self.serialize_unsigned(writer)
        unsigned_bytes = bytearray(writer._stream.getvalue())
    first_pass = hashlib.sha256(unsigned_bytes).digest()
    return types.UInt256(data=hashlib.sha256(first_pass).digest())
def test_write_uint64(self):
    """Max uint64 fits in exactly 8 bytes.

    This also validates signed vs unsigned: a signed encoding would need
    an extra \\x00 byte to express the value and not fit in 8 bytes.
    """
    with serialization.BinaryWriter() as writer:
        writer.write_uint64(0xFFFFFFFFFFFFFFFF)
        self.assertEqual(b'\xFF' * 8, writer._stream.getvalue())
def __deepcopy__(self, memodict=None):
    """
    Deep copy via a serialize/deserialize round trip.

    Not the best, but faster than letting deepcopy() do introspection.

    Note:
        `memodict` satisfies the deepcopy protocol but is intentionally
        ignored. Fix: the default was the mutable `{}` (shared across
        calls); changed to `None`, which is safe and backward-compatible
        because the argument is never used.
    """
    with serialization.BinaryWriter() as bw:
        self.serialize_special(bw)
        serialized = bw.to_array()
    with serialization.BinaryReader(serialized) as br:
        tx = Transaction._serializable_init()
        tx.deserialize_special(br)
    return tx
def hash(self) -> types.UInt256:
    """
    Get a unique block identifier based on the unsigned data portion of the object.
    """
    with serialization.BinaryWriter() as writer:
        self.serialize_unsigned(writer)
        unsigned_data = bytearray(writer._stream.getvalue())
    digest = hashlib.sha256(unsigned_data).digest()
    return types.UInt256(data=digest)
def get_hash_data(self, protocol_magic: int) -> bytes:
    """
    Get the unsigned data prefixed with the network magic.

    Args:
        protocol_magic: network protocol number (NEO MainNet = 5195086,
            Testnet = 1951352142, private net = ??)
    """
    with serialization.BinaryWriter() as bw:
        bw.write_uint32(protocol_magic)
        self.serialize_unsigned(bw)
        return bw.to_array()
def _internal_transaction_put(self, transaction: payloads.Transaction, batch=None):
    """Store a serialized transaction keyed by its prefixed hash.

    Writes to `batch` when given, otherwise directly to the real database.
    """
    target = batch if batch else self._real_db
    with serialization.BinaryWriter() as bw:
        transaction.serialize_special(bw)
        raw_tx = bw.to_array()
    target.put(DBPrefixes.TRANSACTIONS + transaction.hash().to_array(), raw_tx)
def test_write_var_int(self):
    """Var-int encoding: type/range errors plus the 1/3/5/9-byte size tiers."""
    # non-int input is rejected
    with self.assertRaises(TypeError) as context:
        with serialization.BinaryWriter() as bw:
            bw.write_var_int(b'\x01')
    self.assertIn("not int type.", str(context.exception))
    # negative values are rejected
    with self.assertRaises(ValueError) as context:
        with serialization.BinaryWriter() as bw:
            bw.write_var_int(-1)
    self.assertIn("too small.", str(context.exception))
    # small value: single byte
    with serialization.BinaryWriter() as bw:
        bw.write_var_int(1)
        self.assertEqual(b'\x01', bw._stream.getvalue())
    # 0xFFFF edge: 0xFD marker + 2 bytes
    with serialization.BinaryWriter() as bw:
        bw.write_var_int(0xFFFF)
        self.assertEqual(b'\xfd\xff\xFF', bw._stream.getvalue())
    # 0xFFFFFFFF edge: 0xFE marker + 4 bytes
    with serialization.BinaryWriter() as bw:
        bw.write_var_int(0xFFFFFFFF)
        self.assertEqual(b'\xfe\xff\xff\xff\xff', bw._stream.getvalue())
    # beyond 32 bits: 0xFF marker + 8 bytes
    with serialization.BinaryWriter() as bw:
        bw.write_var_int(0x1_0000_0000)
        self.assertEqual(b'\xff\x00\x00\x00\x00\x01\x00\x00\x00',
                         bw._stream.getvalue())
def serialize(stack_item: vm.StackItem, max_size: int) -> bytes:
    """
    Serialize a stack item.

    Note: Interop and Pointer stack items are not supported.

    Args:
        stack_item: the stack item to serialize.
        max_size: maximum byte array output size.

    Raises:
        ValueError: when a circular reference exists in a Map, Struct or Array.
        ValueError: if the output exceeds `max_size`.
    """
    # Iterative depth-first walk: pop an item, emit its type tag + payload;
    # compound items push their children back onto `unserialized`.
    unserialized = [stack_item]
    serialized: List[Union[vm.StackItem, PlaceHolder]] = []
    with serialization.BinaryWriter() as writer:
        while len(unserialized) > 0:
            item = unserialized.pop()
            item_type = type(item)
            writer.write_uint8(int(item.get_type()))
            if item_type == vm.NullStackItem:
                # the type tag alone encodes Null; no payload follows
                continue
            elif item_type == vm.BooleanStackItem:
                writer.write_bool(item.to_boolean())
            elif item_type in [
                    vm.IntegerStackItem, vm.ByteStringStackItem,
                    vm.BufferStackItem
            ]:
                writer.write_var_bytes(item.to_array())
            elif item_type in [vm.ArrayStackItem, vm.StructStackItem]:
                # identity comparison (not equality) to detect circular refs
                if any(map(lambda i: id(i) == id(item), serialized)):
                    raise ValueError("Item already exists")
                serialized.append(item)
                writer.write_var_int(len(item))  # type: ignore
                # reversed so children are popped (and written) in original order
                for element in reversed(item):  # type: ignore
                    unserialized.append(element)
            elif item_type == vm.MapStackItem:
                if any(map(lambda i: id(i) == id(item), serialized)):
                    raise ValueError("Item already exists")
                serialized.append(item)
                writer.write_var_int(len(item))  # type: ignore
                for k, v in reversed(item):  # type: ignore
                    # value pushed first so each key is written before its value
                    unserialized.append(v)
                    unserialized.append(k)
            else:
                raise ValueError(f"Cannot serialize {item_type}")
        if len(writer) > max_size:
            raise ValueError("Output length exceeds max size")
        return writer.to_array()
def test_write_var_bytes_with_grouping(self):
    """Validate the grouped byte encoding: group_size bounds, padding and
    the per-group trailing length byte."""
    # test with invalid group_size's
    with self.assertRaises(ValueError) as context:
        with serialization.BinaryWriter() as bw:
            bw.write_bytes_with_grouping(b'', group_size=-1)
    self.assertIn("group_size must be > 0 and <= 254", str(context.exception))
    with self.assertRaises(ValueError) as context:
        with serialization.BinaryWriter() as bw:
            bw.write_bytes_with_grouping(b'', group_size=0)
    self.assertIn("group_size must be > 0 and <= 254", str(context.exception))
    with self.assertRaises(ValueError) as context:
        with serialization.BinaryWriter() as bw:
            bw.write_bytes_with_grouping(b'', group_size=255)
    self.assertIn("group_size must be > 0 and <= 254", str(context.exception))
    # test empty value
    with serialization.BinaryWriter() as bw:
        bw.write_bytes_with_grouping(b'', group_size=16)
        self.assertEqual(b'\x00' * 17, bw._stream.getvalue())
    # test with value smaller than group_size
    with serialization.BinaryWriter() as bw:
        group_size = 16
        value = b'\x11' * 10
        bw.write_bytes_with_grouping(value, group_size=group_size)
        padding = (group_size - len(value)) * b'\x00'
        self.assertEqual(value + padding + bytes([len(value)]),
                         bw._stream.getvalue())
    # test with value exact same length as group_size
    with serialization.BinaryWriter() as bw:
        group_size = 16
        value = b'\x11' * 16
        bw.write_bytes_with_grouping(value, group_size=group_size)
        self.assertEqual(value + b'\xff', bw._stream.getvalue())
    # test with value exceeding length of group_size
    with serialization.BinaryWriter() as bw:
        group_size = 16
        value = b'\x11' * 20
        bw.write_bytes_with_grouping(value, group_size=group_size)
        padding = b'\x00' * 12
        len_remainder = len(value) % group_size
        # 16 bytes value + group byte + 4 remaining bytes + padding + length remainder
        expected = b'\x11' * 16 + b'\x10' + b'\x11' * 4 + padding + bytes(
            [len_remainder])
        self.assertEqual(expected, bw._stream.getvalue())
def test_write_var_bytes(self):
    """A var-bytes write prefixes the payload with its var-int length."""
    payload = b'\x01\x02\x03\x04'
    with serialization.BinaryWriter() as writer:
        writer.write_var_bytes(payload)
        self.assertEqual(b'\x04' + payload, writer._stream.getvalue())
def _request(self,
             engine: contracts.ApplicationEngine,
             url: str,
             filter: str,
             callback: str,
             user_data: vm.StackItem,
             gas_for_response: int) -> None:
    """
    Register an oracle request: validate the arguments, charge gas, store the
    request, track its id against the URL, and emit an "OracleRequest" event.

    Raises:
        ValueError: if any argument exceeds its size limit, the callback is
            private (leading underscore), the response gas is too low, the
            caller is not a deployed contract, or the URL already has 256
            or more pending responses.
    """
    # Argument validation: size limits, no private callbacks, and a minimum
    # response gas of 0.1 GAS (10_000_000 datoshi).
    if len(url.encode('utf-8')) > self._MAX_URL_LENGTH or \
            len(filter.encode('utf-8')) > self._MAX_FILTER_LEN or \
            len(callback.encode('utf-8')) > self._MAX_CALLBACK_LEN or \
            callback.startswith("_") or \
            gas_for_response < 10000000:
        raise ValueError
    # Charge the request price plus the gas reserved for the response, and
    # mint the response gas to the oracle contract itself.
    engine.add_gas(self.get_price(engine.snapshot))
    engine.add_gas(gas_for_response)
    self._gas.mint(engine, self.hash, vm.BigInteger(gas_for_response), False)
    # Allocate the next request id (stored counter is post-incremented).
    si_item_id = engine.snapshot.storages.get(self.key_request_id,
                                              read_only=False)
    item_id = vm.BigInteger(si_item_id.value)
    si_item_id.value = (item_id + 1).to_array()
    # Only deployed contracts may create oracle requests.
    if contracts.ManagementContract().get_contract(engine.snapshot,
                                                   engine.calling_scripthash) is None:
        raise ValueError
    oracle_request = OracleRequest(self._get_original_txid(engine),
                                   gas_for_response,
                                   url,
                                   filter,
                                   engine.calling_scripthash,
                                   callback,
                                   contracts.BinarySerializer.serialize(
                                       user_data, self._MAX_USER_DATA_LEN))
    engine.snapshot.storages.put(
        self.key_request + int(item_id).to_bytes(8, 'little', signed=False),
        storage.StorageItem(oracle_request.to_array())
    )
    # Append this request id to the per-URL id list (var-int count followed
    # by uint64 ids); create an empty list (b'\x00' == count 0) if absent.
    sk_id_list = self.key_id_list + self._get_url_hash(url)
    si_id_list = engine.snapshot.storages.try_get(sk_id_list, read_only=False)
    if si_id_list is None:
        si_id_list = storage.StorageItem(b'\x00')
    with serialization.BinaryReader(si_id_list.value) as reader:
        count = reader.read_var_int()
        id_list = []
        for _ in range(count):
            id_list.append(reader.read_uint64())
    id_list.append(item_id)
    if len(id_list) >= 256:
        raise ValueError("Oracle has too many pending responses for this url")
    with serialization.BinaryWriter() as writer:
        writer.write_var_int(len(id_list))
        for id in id_list:  # NOTE(review): `id` shadows the builtin
            writer.write_uint64(id)
        si_id_list.value = writer.to_array()
    engine.snapshot.storages.update(sk_id_list, si_id_list)
    state = vm.ArrayStackItem(
        engine.reference_counter,
        [vm.IntegerStackItem(item_id),
         vm.ByteStringStackItem(engine.calling_scripthash.to_array()),
         vm.ByteStringStackItem(url.encode()),
         vm.ByteStringStackItem(filter.encode()),
         ]
    )
    msgrouter.interop_notify(self.hash, "OracleRequest", state)
def test_read_bytes_with_grouping(self):
    """Round-trip and error handling for the grouped byte encoding reader."""
    # test with invalid group_size's
    with self.assertRaises(ValueError) as context:
        with serialization.BinaryReader(b'') as br:
            br.read_bytes_with_grouping(group_size=-1)
    self.assertIn("group_size must be > 0 and <= 254", str(context.exception))
    with self.assertRaises(ValueError) as context:
        with serialization.BinaryReader(b'') as br:
            br.read_bytes_with_grouping(group_size=-0)
    self.assertIn("group_size must be > 0 and <= 254", str(context.exception))
    with self.assertRaises(ValueError) as context:
        with serialization.BinaryReader(b'') as br:
            br.read_bytes_with_grouping(group_size=255)
    self.assertIn("group_size must be > 0 and <= 254", str(context.exception))
    # read empty value
    group_size = 16
    with serialization.BinaryWriter() as bw:
        bw.write_bytes_with_grouping(b'', group_size)
        data = bw._stream.getvalue()
    with serialization.BinaryReader(data) as br:
        result = br.read_bytes_with_grouping(group_size)
        self.assertEqual(b'', result)
    # test with value smaller than group_size
    with serialization.BinaryWriter() as bw:
        input = b'\x11' * 10
        bw.write_bytes_with_grouping(input, group_size)
        data = bw._stream.getvalue()
    with serialization.BinaryReader(data) as br:
        result = br.read_bytes_with_grouping(group_size)
        self.assertEqual(input, result)
    # test with value exact same length as group_size
    with serialization.BinaryWriter() as bw:
        input = b'\x11' * 16
        bw.write_bytes_with_grouping(input, group_size)
        data = bw._stream.getvalue()
    with serialization.BinaryReader(data) as br:
        result = br.read_bytes_with_grouping(group_size)
        self.assertEqual(input, result)
    # test with value exceeding length of group_size (thus having 2 groups)
    with serialization.BinaryWriter() as bw:
        input = b'\x11' * 20
        bw.write_bytes_with_grouping(input, group_size)
        data = bw._stream.getvalue()
    with serialization.BinaryReader(data) as br:
        result = br.read_bytes_with_grouping(group_size)
        self.assertEqual(input, result)
    # test with invalid group size encoding
    group_data = b'\x11' * 16
    # this should not be bigger than `group_size`, in this case b'\x10'
    remaining_group_length = b'\x11'
    data = group_data + remaining_group_length
    with self.assertRaises(ValueError) as context:
        with serialization.BinaryReader(data) as br:
            br.read_bytes_with_grouping(group_size)
    self.assertIn("corrupt remainder length", str(context.exception))
def __len__(self):
    """Serialized byte length: the uint160 prefix plus the grouped key encoding."""
    # TODO: see if there is a cleaner way of doing this
    with serialization.BinaryWriter() as writer:
        writer.write_bytes_with_grouping(self.key, 16)
        grouped_key_len = len(writer._stream.getvalue())
    return s.uint160 + grouped_key_len
def test_write_bytes(self):
    """Consecutive uint8/uint16 writes append little-endian bytes in order."""
    with serialization.BinaryWriter() as writer:
        writer.write_uint8(5)
        writer.write_uint16(257)  # 0x0101 little-endian
        self.assertEqual(b'\x05\x01\x01', writer._stream.getvalue())
def test_write_list_of_serializable_objects(self):
    """A serializable list is a var-int count followed by each element."""
    objects = [SerializableObj(1), SerializableObj(3)]
    with serialization.BinaryWriter() as writer:
        writer.write_serializable_list(objects)
        self.assertEqual(b'\x02\x01\x03', writer._stream.getvalue())
def test_write_serializable(self):
    """Writing a serializable object delegates to the object's own encoding."""
    obj = SerializableObj(1)
    with serialization.BinaryWriter() as writer:
        writer.write_serializable(obj)
        self.assertEqual(b'\x01', writer._stream.getvalue())
def test_write_bool(self):
    """Falsy input encodes to \\x00; any truthy input normalizes to \\x01."""
    with serialization.BinaryWriter() as writer:
        writer.write_bool(0)
        writer.write_bool(1)
        writer.write_bool(15)
        self.assertEqual(b'\x00\x01\x01', writer._stream.getvalue())
def test_write_var_string(self):
    """A var-string is the var-int length followed by the encoded characters."""
    with serialization.BinaryWriter() as writer:
        writer.write_var_string('ABC')
        self.assertEqual(b'\x03ABC', writer._stream.getvalue())
def test_write_int64(self):
    """-1 as signed int64 is eight 0xFF bytes (two's complement).

    This also validates signed vs unsigned: an unsigned write would encode
    the value without the leading \\x00 sign handling.
    """
    with serialization.BinaryWriter() as writer:
        writer.write_int64(-1)
        self.assertEqual(b'\xFF' * 8, writer._stream.getvalue())