def test_reading_uint8(self):
    """Verify read_uint8 consumes exactly one byte and decodes unsigned."""
    data = b'\x01\x02'
    # only the first byte of the stream should be consumed
    with serialization.BinaryReader(data) as reader:
        value = reader.read_uint8()
        self.assertEqual(data[0], value)
    # 0xFF must decode as 255, not -1
    with serialization.BinaryReader(b'\xFF') as reader:
        value = reader.read_uint8()
        self.assertEqual(255, value)
def test_read_uint16(self):
    """Verify read_uint16 consumes exactly two bytes, little-endian, unsigned."""
    data = b'\x01\x02\x03'
    # only the first two bytes should be consumed
    with serialization.BinaryReader(data) as reader:
        value = reader.read_uint16()
        self.assertEqual(int.from_bytes(data[:2], 'little'), value)
    # a high byte of 0xFF must stay positive (unsigned)
    with serialization.BinaryReader(b'\x01\xFF') as reader:
        value = reader.read_uint16()
        self.assertEqual(65281, value)
def test_read_uint32(self):
    """Verify read_uint32 consumes exactly four bytes, little-endian, unsigned."""
    data = b'\x01\x02\x03\x04\x05'
    # only the first four bytes should be consumed
    with serialization.BinaryReader(data) as reader:
        value = reader.read_uint32()
        self.assertEqual(int.from_bytes(data[:4], 'little'), value)
    # a high byte of 0xFF must stay positive (unsigned)
    with serialization.BinaryReader(b'\x01\x02\x03\xFF') as reader:
        value = reader.read_uint32()
        self.assertEqual(4278387201, value)
def test_read_uint64(self): input_data = b'\x01\x02\x03\x04\x05\x06\x07\x08\x09' # make sure we read only 4 bytes with serialization.BinaryReader(input_data) as br: b = br.read_uint64() self.assertEqual(int.from_bytes(input_data[:8], 'little'), b) # validate we read as unsigned with serialization.BinaryReader( b'\x01\x02\x03\x04\x05\x06\x07\xFF') as br: b = br.read_uint64() self.assertEqual(18376663423120507393, b)
def test_read_string(self):
    """read_var_string: length-prefixed string; truncated data raises ValueError."""
    payload = b'\x02\x41\x42'
    with serialization.BinaryReader(payload) as reader:
        result = reader.read_var_string()
        self.assertIsInstance(result, str)
        self.assertEqual('AB', result)
    # the length prefix promises 2 bytes but only 1 follows
    payload = b'\x02\x41'
    with self.assertRaises(ValueError) as context:
        with serialization.BinaryReader(payload) as reader:
            reader.read_var_string()
    self.assertIn("unpack requires a buffer of 2 bytes", str(context.exception))
def test_deserialize_from_stream(self):
    """Round-trip raw bytes into UInt160/UInt256 via their deserialize() methods."""
    raw160 = bytearray(20 * [0x11])
    raw256 = bytearray(32 * [0x11])
    with serialization.BinaryReader(raw160) as reader:
        # we explicitly call deserialize, instead of br.read_uint160() for coverage
        u160 = UInt160.zero()
        u160.deserialize(reader)
        self.assertEqual(raw160, u160._data)
    with serialization.BinaryReader(raw256) as reader:
        u256 = UInt256.zero()
        u256.deserialize(reader)
        self.assertEqual(raw256, u256._data)
def test_read_bool(self):
    """read_bool: zero byte is False, any non-zero byte is True."""
    for raw, expected in ((b'\x00', False), (b'\x01', True), (b'\x55', True)):
        with serialization.BinaryReader(raw) as reader:
            flag = reader.read_bool()
            self.assertIsInstance(flag, bool)
            self.assertEqual(expected, flag)
def from_snapshot(cls, snapshot: storage.Snapshot):
    """Build an instance from persisted state in ``snapshot``.

    Reads the raw storage value under the class' storage key and
    deserializes it into a fresh instance.
    """
    c = cls(snapshot, {})
    with serialization.BinaryReader(
            snapshot.storages.get(c._storage_key, read_only=True).value) as reader:
        c.deserialize(reader)
    return c
def deserialize_from_bytes(cls: Type[VersionPayload], data: bytes) -> VersionPayload:
    """
    Deserialize object.

    Args:
        data: raw serialized payload bytes.

    Returns:
        a new VersionPayload instance populated from ``data``.
    """
    with serialization.BinaryReader(data) as br:
        obj = cls()
        obj.deserialize(br)
        return obj
def __deepcopy__(self, memodict={}):
    # not the best, but faster than letting deepcopy() do introspection
    # clone by round-tripping through the special (de)serialization format
    with serialization.BinaryWriter() as bw:
        self.serialize_special(bw)
        with serialization.BinaryReader(bw.to_array()) as br:
            tx = Transaction._serializable_init()
            tx.deserialize_special(br)
            return tx
def test_reading_serializable(self):
    """Round-trip a single ISerializable object through read_serializable."""
    source = SerializableObj(1)
    with serialization.BinaryReader(source.to_array()) as reader:
        restored = reader.read_serializable(obj_type=SerializableObj)
        self.assertIsInstance(restored, serialization.ISerializable)
        self.assertIsInstance(restored, SerializableObj)
        self.assertEqual(1, restored.a)
def test_deserialization_from_stream(self):
    """Deserialize a captured INV message from raw wire bytes."""
    # see test_create_compressed_inv_message() how it was obtained
    raw_data = binascii.unhexlify(b'01270D3F2C0400010067500000000000')
    with serialization.BinaryReader(raw_data) as reader:
        msg = message.Message()
        msg.deserialize(reader)
        self.assertEqual(msg.type, message.MessageType.INV)
        self.assertEqual(msg.payload.type, payloads.inventory.InventoryType.BLOCK)
def _internal_transaction_get(self, hash: types.UInt256):
    # Fetch a transaction by hash from the backing store and deserialize it.
    tx_bytes = self._real_db.get(DBPrefixes.TRANSACTIONS + hash.to_array())
    if tx_bytes is None:
        # this is a must if not found!
        raise KeyError
    with serialization.BinaryReader(tx_bytes) as br:
        tx = payloads.Transaction._serializable_init()
        tx.deserialize_special(br)
        return tx
def test_read_var_int(self):
    """Exercise every var-int encoding width (1, 3, 5 and 9 bytes) plus the max guard."""
    cases = (
        (b'\x00', 0),                           # no value
        (b'\xFC', 252),                         # < 0xFD is encoded in 1 byte
        (b'\xfd\x01\xFF', 65281),               # < 0xFFFF is encoded in 3 bytes
        (b'\xfe\x01\x02\x03\xFF', 4278387201),  # < 0xFFFFFFFF is encoded in 5 bytes
        # > 0xFFFFFFFF is encoded in 9 bytes
        (b'\xff\x01\x02\x03\x04\x05\x06\x07\x08\xFF', 578437695752307201),
    )
    for raw, expected in cases:
        with serialization.BinaryReader(raw) as reader:
            self.assertEqual(expected, reader.read_var_int())
    # a decoded value above `max` must be rejected
    with self.assertRaises(ValueError) as context:
        with serialization.BinaryReader(b'\xFC') as reader:
            reader.read_var_int(max=10)
    self.assertIn("Invalid format", str(context.exception))
def test_reading_list_of_serializables(self):
    """Read a var-int prefixed list of serializables, without and with a size cap."""
    s1 = SerializableObj(1)
    s2 = SerializableObj(2)
    array_length = b'\x02'
    with serialization.BinaryReader(array_length + s1.to_array() + s2.to_array()) as br:
        objs = br.read_serializable_list(obj_type=SerializableObj)
        self.assertIsInstance(objs, list)
        # BUG FIX: was `assertTrue(2, len(objs))` — assertTrue's second argument
        # is the failure *message*, so that assertion could never fail
        self.assertEqual(2, len(objs))
        for o in objs:
            self.assertIsInstance(o, SerializableObj)
    # test but limit max array
    array_length = b'\x02'
    with serialization.BinaryReader(array_length + s1.to_array() + s2.to_array()) as br:
        objs = br.read_serializable_list(obj_type=SerializableObj, max=1)
        self.assertIsInstance(objs, list)
        # BUG FIX: same assertTrue misuse as above; the cap must limit the list to 1
        self.assertEqual(1, len(objs))
        for o in objs:
            self.assertIsInstance(o, SerializableObj)
def test_serialize(self):
    """A manifest serializes to a single var-string holding its JSON form."""
    # if test_create() passes, then we know `to_json()` is ok, which serialize internally uses
    cm = contracts.ContractManifest("test_contract")
    method1 = contracts.ContractMethodDescriptor(
        name="main_entry",
        offset=0,
        parameters=[],
        return_type=contracts.ContractParameterType.INTEGER,
        safe=True)
    cm.abi.methods = [method1]
    with serialization.BinaryReader(cm.to_array()) as reader:
        raw_json = reader.read_var_string()
        self.assertDictEqual(self.expected_json, json.loads(raw_json))
def test_read_var_bytes(self):
    """read_var_bytes: length-prefixed bytes; short streams return what is available."""
    data = b'\x02\x01\x02\x03'
    with serialization.BinaryReader(data) as reader:
        chunk = reader.read_var_bytes()
        self.assertEqual(data[1:3], chunk)
    # Reading with insufficient data mirrors the C# BinaryReader.ReadVarBytes
    # behaviour, which reads up to the encoded amount of bytes *or less*:
    #   byte[] value = { 0x2 };
    #   using (MemoryStream ms = new MemoryStream(value, false))
    #   using (BinaryReader reader = new BinaryReader(ms, System.Text.Encoding.UTF8))
    #   { byte[] b = reader.ReadVarBytes(); }
    # here: encode 2 bytes of remaining data, but supply none
    data = b'\x02'
    with serialization.BinaryReader(data) as reader:
        chunk = reader.read_var_bytes()
        self.assertEqual(b'', chunk)
def _internal_transaction_all(self):
    # Generator yielding a deep copy of every stored transaction.
    res = []
    with self._real_db.iterator(prefix=DBPrefixes.TRANSACTIONS,
                                include_key=False,
                                include_value=True) as it:
        for value in it:
            # strip off prefix
            with serialization.BinaryReader(value) as br:
                v = payloads.Transaction._serializable_init()
                v.deserialize_special(br)
                res.append(v)
    # yielding outside of iterator to make sure the LevelDB iterator is closed
    # and not leaking resources
    for tx in res:
        yield deepcopy(tx)
def test_read_bytes(self):
    """read_byte/read_bytes happy paths plus exhausted-stream error messages."""
    with serialization.BinaryReader(b'\x01') as reader:
        self.assertEqual(b'\x01', reader.read_byte())
    # reading a byte from an empty stream must fail loudly
    with self.assertRaises(ValueError) as context:
        with serialization.BinaryReader(b'') as reader:
            reader.read_byte()
    self.assertIn("Could not read byte from empty stream", str(context.exception))
    data = b'\x01\x02\x03'
    with serialization.BinaryReader(data) as reader:
        self.assertEqual(data[:2], reader.read_bytes(2))
    # try reading more than available
    with self.assertRaises(ValueError) as context:
        with serialization.BinaryReader(data) as reader:
            reader.read_bytes(5)
    self.assertIn("Could not read 5 bytes from stream. Only found 3 bytes of data",
                  str(context.exception))
async def _read():
    """Read one framed Message from the stream; return None on any failure.

    Frame layout: 3 header bytes, where the third byte is a Bitcoin-style
    var-int length (0xFD -> 2 extra length bytes, 0xFE -> 4, 0xFF -> 8),
    followed by the payload.
    """
    try:
        # readexactly can throw ConnectionResetError
        message_header = await self._stream_reader_orig.readexactly(3)
        payload_length = message_header[2]
        if payload_length == 0xFD:
            len_bytes = await self._stream_reader_orig.readexactly(2)
            payload_length, = struct.unpack("<H", len_bytes)
        elif payload_length == 0xFE:
            len_bytes = await self._stream_reader_orig.readexactly(4)
            payload_length, = struct.unpack("<I", len_bytes)
        elif payload_length == 0xFF:
            # BUG FIX: this branch previously tested `payload_length == 0xFE`
            # a second time, making the 8-byte length encoding unreachable
            len_bytes = await self._stream_reader_orig.readexactly(8)
            payload_length, = struct.unpack("<Q", len_bytes)
        else:
            len_bytes = b''
        if payload_length > Message.PAYLOAD_MAX_SIZE:
            raise ValueError("Invalid format")
        payload_data = await self._stream_reader_orig.readexactly(
            payload_length)
        raw = message_header + len_bytes + payload_data
        with serialization.BinaryReader(raw) as br:
            m = Message()
            try:
                m.deserialize(br)
                return m
            except Exception:
                logger.debug(
                    f"Failed to deserialize message: {traceback.format_exc()}"
                )
                return None
    except (ConnectionResetError, ValueError):
        # ensures we break out of the main run() loop of Node, which triggers a disconnect callback to clean up
        self.client.disconnecting = True
        logger.debug(
            f"Failed to read message data for reason: {traceback.format_exc()}"
        )
        return None
    except (asyncio.CancelledError, asyncio.IncompleteReadError):
        return None
    except Exception:
        # ensures we break out of the main run() loop of Node, which triggers a disconnect callback to clean up
        logger.debug(f"error read message 1 {traceback.format_exc()}")
        return None
def get_designated_by_role(self, snapshot: storage.Snapshot,
                           role: DesignateRole,
                           index: int) -> List[cryptography.ECPoint]:
    """Return the nodes designated for ``role`` effective at block ``index``.

    Walks storage in reverse so the first hit is the most recent designation
    at or before ``index``. Returns an empty list when none exists.

    Raises:
        ValueError: if ``index`` is beyond the next block height.
    """
    if snapshot.best_block_height + 1 < index:
        raise ValueError(
            "[DesignateContract] Designate list index out of range")
    key = self.create_key(
        role.to_bytes(1, 'little') + self._to_uint32(index)).to_array()
    boundary = self.create_key(role.to_bytes(1, 'little')).to_array()
    for _, storage_item in snapshot.storages.find_range(
            key, boundary, "reverse"):
        # first item found walking backwards is the applicable designation
        with serialization.BinaryReader(storage_item.value) as reader:
            return reader.read_serializable_list(cryptography.ECPoint)
    else:
        # for/else: loop yielded nothing, so no designation exists
        return []
def _payload_from_data(msg_type, data):
    """Deserialize `data` into the payload class matching `msg_type`.

    Returns None (after logging) for message types without a payload mapping.
    """
    payload_classes = {
        MessageType.INV: payloads.InventoryPayload,
        MessageType.GETDATA: payloads.InventoryPayload,
        MessageType.GETBLOCKBYINDEX: payloads.GetBlockByIndexPayload,
        MessageType.VERSION: payloads.VersionPayload,
        MessageType.VERACK: payloads.EmptyPayload,
        MessageType.BLOCK: payloads.Block,
        MessageType.HEADERS: payloads.HeadersPayload,
        MessageType.PING: payloads.PingPayload,
        MessageType.PONG: payloads.PingPayload,
        MessageType.TRANSACTION: payloads.Transaction,
    }
    with serialization.BinaryReader(data) as br:
        payload_cls = payload_classes.get(msg_type)
        if payload_cls is None:
            logger.debug(f"Unsupported payload {msg_type.name}")
            return None
        return br.read_serializable(payload_cls)
def test_read_int32(self):
    """A set sign bit in the top byte makes the little-endian int32 negative."""
    with serialization.BinaryReader(b'\x01\x02\x03\xFF') as reader:
        self.assertEqual(-16580095, reader.read_int32())
def test_read_bytes_with_grouping(self):
    """Cover group_size validation, empty/partial/exact/multi-group round trips
    and corrupt remainder-length detection."""
    # test with invalid group_size's
    with self.assertRaises(ValueError) as context:
        with serialization.BinaryReader(b'') as br:
            br.read_bytes_with_grouping(group_size=-1)
    self.assertIn("group_size must be > 0 and <= 254", str(context.exception))
    with self.assertRaises(ValueError) as context:
        with serialization.BinaryReader(b'') as br:
            # NOTE: -0 is just 0 in Python, so this exercises group_size == 0
            br.read_bytes_with_grouping(group_size=-0)
    self.assertIn("group_size must be > 0 and <= 254", str(context.exception))
    with self.assertRaises(ValueError) as context:
        with serialization.BinaryReader(b'') as br:
            br.read_bytes_with_grouping(group_size=255)
    self.assertIn("group_size must be > 0 and <= 254", str(context.exception))
    # read empty value
    group_size = 16
    with serialization.BinaryWriter() as bw:
        bw.write_bytes_with_grouping(b'', group_size)
        data = bw._stream.getvalue()
    with serialization.BinaryReader(data) as br:
        result = br.read_bytes_with_grouping(group_size)
        self.assertEqual(b'', result)
    # test with value smaller than group_size
    with serialization.BinaryWriter() as bw:
        input = b'\x11' * 10
        bw.write_bytes_with_grouping(input, group_size)
        data = bw._stream.getvalue()
    with serialization.BinaryReader(data) as br:
        result = br.read_bytes_with_grouping(group_size)
        self.assertEqual(input, result)
    # test with value exact same length as group_size
    with serialization.BinaryWriter() as bw:
        input = b'\x11' * 16
        bw.write_bytes_with_grouping(input, group_size)
        data = bw._stream.getvalue()
    with serialization.BinaryReader(data) as br:
        result = br.read_bytes_with_grouping(group_size)
        self.assertEqual(input, result)
    # test with value exceeding length of group_size (thus having 2 groups)
    with serialization.BinaryWriter() as bw:
        input = b'\x11' * 20
        bw.write_bytes_with_grouping(input, group_size)
        data = bw._stream.getvalue()
    with serialization.BinaryReader(data) as br:
        result = br.read_bytes_with_grouping(group_size)
        self.assertEqual(input, result)
    # test with invalid group size encoding
    group_data = b'\x11' * 16
    # this should not be bigger than `group_size`, in this case b'\x10'
    remaining_group_length = b'\x11'
    data = group_data + remaining_group_length
    with self.assertRaises(ValueError) as context:
        with serialization.BinaryReader(data) as br:
            br.read_bytes_with_grouping(group_size)
    self.assertIn("corrupt remainder length", str(context.exception))
def test_serialize(self):
    """A default manifest serializes to a single var-string holding its JSON form."""
    # if test_create_default() passes, then we know `to_json()` is ok, which serialize internally uses
    manifest = contracts.ContractManifest(types.UInt160.zero())
    with serialization.BinaryReader(manifest.to_array()) as reader:
        raw_json = reader.read_var_string()
        self.assertDictEqual(self.expected_json, json.loads(raw_json))
def deserialize(data: bytes, max_size: int, max_item_size: int,
                reference_counter: vm.ReferenceCounter) -> vm.StackItem:
    """
    Deserialize data into a stack item.

    Args:
        data: byte array of a serialized stack item.
        max_size: data reading limit for Array, Struct and Map types.
        max_item_size: data reading limit for ByteString or Buffer types.
        reference_counter: a valid reference counter instance. Get's passed
            into reference stack items.

    Raises:
        ValueError: when there is nothing to deserialize or an unsupported
            stack item type is encountered.
    """
    if len(data) == 0:
        raise ValueError("Nothing to deserialize")
    # Phase 1: flatten the serialized tree; compound types become PlaceHolder
    # entries carrying their child count so the tree can be rebuilt later.
    deserialized: List[Union[vm.StackItem, PlaceHolder]] = []
    to_deserialize = 1
    with serialization.BinaryReader(data) as reader:
        while not to_deserialize == 0:
            to_deserialize -= 1
            item_type = vm.StackItemType(reader.read_byte()[0])
            if item_type == vm.StackItemType.ANY:
                deserialized.append(vm.NullStackItem())
            elif item_type == vm.StackItemType.BOOLEAN:
                deserialized.append(vm.BooleanStackItem(reader.read_bool()))
            elif item_type == vm.StackItemType.INTEGER:
                deserialized.append(
                    vm.IntegerStackItem(
                        vm.BigInteger(
                            reader.read_var_bytes(
                                vm.IntegerStackItem.MAX_SIZE))))
            elif item_type == vm.StackItemType.BYTESTRING:
                deserialized.append(
                    vm.ByteStringStackItem(
                        reader.read_var_bytes(max_item_size)))
            elif item_type == vm.StackItemType.BUFFER:
                deserialized.append(
                    vm.BufferStackItem(reader.read_var_bytes(max_item_size)))
            elif item_type in [vm.StackItemType.ARRAY, vm.StackItemType.STRUCT]:
                count = reader.read_var_int(max_size)
                deserialized.append(PlaceHolder(item_type, count))
                to_deserialize += count
            elif item_type == vm.StackItemType.MAP:
                count = reader.read_var_int(max_size)
                deserialized.append(PlaceHolder(item_type, count))
                # maps serialize a key AND a value per entry
                to_deserialize += count * 2
            else:
                raise ValueError("Invalid format")
    # Phase 2: rebuild the item tree by walking the flattened list backwards.
    temp: List[vm.StackItem] = []
    while len(deserialized) > 0:
        item = deserialized.pop()
        # FIX (idiom): was `type(item) == PlaceHolder`; isinstance is the
        # correct type check and also narrows the type without a cast
        if isinstance(item, PlaceHolder):
            if item.type == vm.StackItemType.ARRAY:
                array = vm.ArrayStackItem(reference_counter)
                for _ in range(0, item.count):
                    array.append(temp.pop())
                temp.append(array)
            elif item.type == vm.StackItemType.STRUCT:
                struct = vm.StructStackItem(reference_counter)
                for _ in range(0, item.count):
                    struct.append(temp.pop())
                temp.append(struct)
            elif item.type == vm.StackItemType.MAP:
                m = vm.MapStackItem(reference_counter)
                for _ in range(0, item.count):
                    k = temp.pop()
                    k = cast(vm.PrimitiveType, k)
                    v = temp.pop()
                    m[k] = v
                temp.append(m)
        else:
            item = cast(vm.StackItem, item)
            temp.append(item)
    return temp.pop()
def test_length(self):
    """len() of a reader reports the full stream size in bytes."""
    with serialization.BinaryReader(b'\x02\x41\x42') as reader:
        self.assertEqual(3, len(reader))
def test_read_int16(self):
    """A set sign bit in the top byte makes the little-endian int16 negative."""
    with serialization.BinaryReader(b'\x01\xFF') as reader:
        self.assertEqual(-255, reader.read_int16())
def _request(self, engine: contracts.ApplicationEngine, url: str, filter: str,
             callback: str, user_data: vm.StackItem, gas_for_response: int) -> None:
    """Create an oracle request, persist it and notify listeners.

    Validates the arguments, charges the request price plus the response gas,
    stores an OracleRequest under a fresh id and appends that id to the
    per-URL pending-id list, then emits an "OracleRequest" notification.

    Raises:
        ValueError: on invalid url/filter/callback/gas arguments, when the
            caller is not a deployed contract, or when the URL already has
            256 or more pending responses.
    """
    if len(url.encode('utf-8')) > self._MAX_URL_LENGTH or \
            len(filter.encode('utf-8')) > self._MAX_FILTER_LEN or \
            len(callback.encode('utf-8')) > self._MAX_CALLBACK_LEN or \
            callback.startswith("_") or \
            gas_for_response < 10000000:
        raise ValueError
    engine.add_gas(self.get_price(engine.snapshot))
    # reserve the gas the response callback will consume up front
    engine.add_gas(gas_for_response)
    self._gas.mint(engine, self.hash, vm.BigInteger(gas_for_response), False)
    # take the current request id and advance the global counter
    si_item_id = engine.snapshot.storages.get(self.key_request_id,
                                              read_only=False)
    item_id = vm.BigInteger(si_item_id.value)
    si_item_id.value = (item_id + 1).to_array()
    # only deployed contracts may create oracle requests
    if contracts.ManagementContract().get_contract(engine.snapshot,
                                                   engine.calling_scripthash) is None:
        raise ValueError
    oracle_request = OracleRequest(self._get_original_txid(engine),
                                   gas_for_response,
                                   url,
                                   filter,
                                   engine.calling_scripthash,
                                   callback,
                                   contracts.BinarySerializer.serialize(
                                       user_data, self._MAX_USER_DATA_LEN))
    engine.snapshot.storages.put(
        self.key_request + int(item_id).to_bytes(8, 'little', signed=False),
        storage.StorageItem(oracle_request.to_array())
    )
    # maintain the list of pending request ids for this URL
    sk_id_list = self.key_id_list + self._get_url_hash(url)
    si_id_list = engine.snapshot.storages.try_get(sk_id_list, read_only=False)
    if si_id_list is None:
        # b'\x00' is an encoded var-int count of zero (empty id list)
        si_id_list = storage.StorageItem(b'\x00')
    with serialization.BinaryReader(si_id_list.value) as reader:
        count = reader.read_var_int()
        id_list = []
        for _ in range(count):
            id_list.append(reader.read_uint64())
    id_list.append(item_id)
    if len(id_list) >= 256:
        raise ValueError("Oracle has too many pending responses for this url")
    with serialization.BinaryWriter() as writer:
        writer.write_var_int(len(id_list))
        for id in id_list:
            writer.write_uint64(id)
        si_id_list.value = writer.to_array()
    engine.snapshot.storages.update(sk_id_list, si_id_list)
    state = vm.ArrayStackItem(
        engine.reference_counter,
        [vm.IntegerStackItem(item_id),
         vm.ByteStringStackItem(engine.calling_scripthash.to_array()),
         vm.ByteStringStackItem(url.encode()),
         vm.ByteStringStackItem(filter.encode()),
         ]
    )
    msgrouter.interop_notify(self.hash, "OracleRequest", state)
def test_read_int64(self):
    """A set sign bit in the top byte makes the little-endian int64 negative."""
    with serialization.BinaryReader(
            b'\x01\x02\x03\x04\x05\x06\x07\xFF') as reader:
        self.assertEqual(-70080650589044223, reader.read_int64())