def test_decode_aligned_uint(self):
    # A 1-byte field aligned to 4 bytes: the value comes from the first byte,
    # while the decoded length and raw data cover the alignment padding too.
    uint = UInt(1, 'foo', None, align='4')
    decoded = uint.decode(to_bin('0xff00 0000'), None)
    self.assertEquals(decoded.int, 255)
    self.assertEquals(decoded.hex, '0xff')
    self.assertEquals(len(decoded), 4)
    self.assertEquals(decoded._raw, to_bin('0xff00 0000'))
def test_decode_returns_used_length(self):
    field_template = UInt(2, 'field', 6)
    data = to_bin('0xcafebabeff00ff00')
    decoded = field_template.decode(data, {})
    self.assertEquals(decoded.hex, '0xcafe')
    self.assertEquals(len(decoded), 2)
def test_decode_uint(self):
    field_template = UInt(2, 'field', 6)
    decoded = field_template.decode(to_bin('0xcafe'), {})
    self.assertEquals(decoded.hex, '0xcafe')
def test_little_endian_uint_decode(self):
    # Little-endian decoding keeps the raw bytes in wire order but interprets
    # the value least-significant byte first, so 0x0100 decodes to 1.
    template = UInt(2, 'field', None)
    field = template.decode(to_bin('0x0100'), None, little_endian=True)
    self.assertEquals(field._raw, to_bin('0x0100'))
    self.assertEquals(field.int, 1)
    self.assertEquals(field.bytes, to_bin('0x0001'))
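
# The tests above build their raw input with a to_bin helper that turns spaced
# hex literals such as '0xff00 0000' into byte strings. The helper's actual
# implementation is not shown in this section; the function below is only a
# sketch of what such a helper might do, and the name _to_bin_sketch is made
# up for illustration.
def _to_bin_sketch(hex_string):
    # Drop the 0x prefix and any grouping spaces, then decode the hex digits.
    cleaned = hex_string.replace('0x', '').replace(' ', '')
    return bytes.fromhex(cleaned)

# For example, _to_bin_sketch('0x0100') yields b'\x01\x00', which read
# little-endian is the value 1 asserted in test_little_endian_uint_decode.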