def test_save_discard_restore():
    """Saved read-position survives a discard and can be restored."""
    buf = Buffer()
    buf.add(b"spam")
    buf.save()
    assert len(buf) == 4
    buf.discard()
    assert len(buf) == 0
    buf.restore()
    assert len(buf) == 4
def test_read():
    """read(n) consumes n bytes, read() drains, underrun raises."""
    buf = Buffer()
    buf.add(b"spam")
    for expected in (b"sp", b"am"):
        assert buf.read(2) == expected
    buf.add(b"eggs")
    assert buf.read() == b"eggs"
    with pytest.raises(BufferUnderrun):
        buf.read(1)
def to_bytes(self):
    """Serialize this compound tag.

    Each child entry is emitted as a type-id byte, the entry name as a
    ``TagString``, then the child's payload. A single zero byte
    (``TAG_End``) terminates the compound when it is empty or when it
    is not the root compound, matching the original behavior.
    """
    # Accumulate parts and join once: repeated ``bytes +=`` in a loop
    # is quadratic in the total output size.
    parts = []
    for name, tag in self.value.items():
        parts.append(Buffer.pack('b', _ids[type(tag)]))
        parts.append(TagString(name).to_bytes())
        parts.append(tag.to_bytes())
    if len(self.value) == 0 or not self.root:
        parts.append(Buffer.pack('b', 0))
    return b"".join(parts)
def to_bytes(self):
    """Serialize this list tag: element type id, element count, payloads."""
    # An empty list is typed as TAG_Byte, per the original behavior.
    head = self.value[0] if self.value else TagByte(0)
    body = b"".join(tag.to_bytes() for tag in self.value)
    return Buffer.pack('bi', _ids[type(head)], len(self.value)) + body
def load_chunk(self, chunk_x, chunk_z):
    """
    Loads the chunk at the given co-ordinates from the region file.
    The co-ordinates should range from 0 to 31. Returns a ``TagRoot``.
    """
    buff = Buffer()

    # Locate the chunk via its 4-byte entry in the extent header:
    # high 24 bits are the sector offset, low 8 bits the sector count.
    self.fd.seek(4 * (32 * chunk_z + chunk_x))
    buff.add(self.fd.read(4))
    entry = buff.unpack('I')
    chunk_offset, chunk_length = entry >> 8, entry & 0xFF
    if chunk_offset == 0:
        # A zero offset marks an absent chunk.
        raise ValueError((chunk_x, chunk_z))

    # Read the chunk's sectors, strip the length/compression prefix,
    # and decompress the NBT payload.
    self.fd.seek(4096 * chunk_offset)
    buff.add(self.fd.read(4096 * chunk_length))
    data = buff.read(buff.unpack('IB')[0])
    return TagRoot.from_bytes(zlib.decompress(data))
def save_chunk(self, chunk): """ Saves the given chunk, which should be a ``TagRoot``, to the region file. """ # Compress chunk chunk_x = chunk.body.value["Level"].value["xPos"].value chunk_z = chunk.body.value["Level"].value["zPos"].value chunk = zlib.compress(chunk.to_bytes()) chunk = Buffer.pack('IB', len(chunk), 2) + chunk chunk_length = 1 + (len(chunk) - 1) // 4096 # Load extents extents = [(0, 2)] self.fd.seek(0) buff = Buffer(self.fd.read(4096)) for idx in range(1024): z, x = divmod(idx, 32) entry = buff.unpack('I') offset, length = entry >> 8, entry & 0xFF if offset > 0 and not (x == chunk_x and z == chunk_z): extents.append((offset, length)) extents.sort() extents.append((extents[-1][0] + extents[-1][1] + chunk_length, 0)) # Compute new extent for idx in range(len(extents) - 1): start = extents[idx][0] + extents[idx][1] end = extents[idx + 1][0] if (end - start) >= chunk_length: chunk_offset = start extents.insert(idx + 1, (chunk_offset, chunk_length)) break # Write extent header self.fd.seek(4 * (32 * chunk_z + chunk_x)) self.fd.write( Buffer.pack('I', (chunk_offset << 8) | (chunk_length & 0xFF))) # Write timestamp header self.fd.seek(4096 + 4 * (32 * chunk_z + chunk_x)) self.fd.write(Buffer.pack('I', int(time.time()))) # Write chunk self.fd.seek(4096 * chunk_offset) self.fd.write(chunk) # Truncate file self.fd.seek(4096 * extents[-1][0]) self.fd.truncate()
def test_unpack_uuid():
    """A packed UUID round-trips through unpack_uuid()."""
    buf = Buffer()
    buf.add(uuid_vector)
    result = buf.unpack_uuid()
    assert result.to_bytes() == uuid_vector
def test_unpack_chat():
    """Each packed chat payload decodes to the expected display string."""
    cases = [
        (b'\x11["spam", " eggs"]', "spam eggs"),
        (b'\x22{"text": "spam", "extra": " eggs"}', "spam eggs"),
        (b'\x14{"translate": "foo"}', "foo"),
        (b'\x2E{"translate": "foo", "with": ["spam", "eggs"]}',
         "foo{spam, eggs}"),
    ]
    buf = Buffer()
    for data, expected in cases:
        buf.add(data)
        assert buf.unpack_chat().to_string() == expected
def test_unpack_json():
    """A length-prefixed JSON payload decodes to the original object."""
    buf = Buffer()
    buf.add(b'\x10{"spam": "eggs"}')
    decoded = buf.unpack_json()
    assert decoded == {"spam": "eggs"}
def test_pack_slot():
    """Every slot vector packs to its expected wire bytes."""
    for kwargs, expected in slot_vectors:
        assert Buffer.pack_slot(**kwargs) == expected
def from_bytes(cls, bytes):
    """Alternate constructor: parse *bytes* via a temporary Buffer."""
    buff = Buffer(bytes)
    return cls.from_buff(buff)
def test_pack():
    """Every format/value vector packs to its expected bytes."""
    for fmt, expected, values in pack_unpack_vectors:
        # Single values are stored bare in the vector table; normalize
        # to a tuple for splatting.
        args = values if isinstance(values, tuple) else (values,)
        assert Buffer.pack(fmt, *args) == expected
def test_add():
    """Added bytes are counted by len() and returned by read()."""
    buf = Buffer()
    buf.add(b"spam")
    assert len(buf) == 4
    assert buf.read() == b"spam"
def to_bytes(self):
    """Serialize this tag's value as length-prefixed JSON."""
    encoded = Buffer.pack_json(self.value)
    return encoded
def to_bytes(self):
    """Serialize as a signed-int element count followed by the raw array."""
    payload = self.value.to_bytes()
    # The prefix counts elements, not bytes; ``width`` is bits per element.
    count = len(payload) // (self.width // 8)
    return Buffer.pack('i', count) + payload
def to_bytes(self):
    """Pack this tag's scalar value using its struct format string."""
    packed = Buffer.pack(self.fmt, self.value)
    return packed
def test_unpack_slot():
    """Each slot wire encoding decodes to its value, draining the buffer."""
    buf = Buffer()
    for expected, data in slot_vectors:
        buf.add(data)
        assert buf.unpack_slot() == expected
        assert len(buf) == 0
def test_unpack_entity_metadata():
    """Each metadata encoding decodes to its value, draining the buffer."""
    buf = Buffer()
    for expected, data in entity_metadata_vectors:
        buf.add(data)
        assert buf.unpack_entity_metadata() == expected
        assert len(buf) == 0
def test_pack_json():
    """JSON objects pack to a length-prefixed UTF-8 document."""
    packed = Buffer.pack_json({"spam": "eggs"})
    assert packed == b'\x10{"spam": "eggs"}'
def test_pack_string():
    """Strings pack to a length prefix followed by the text."""
    packed = Buffer.pack_string("spam")
    assert packed == b"\x04spam"
def test_pack_varint():
    """Every varint vector packs to its expected encoding."""
    for value, expected in varint_vectors:
        assert Buffer.pack_varint(value) == expected
def test_pack_chat():
    """Plain text packs as a length-prefixed JSON chat object."""
    packed = Buffer.pack_chat("spam")
    assert packed == b'\x10{"text": "spam"}'
def test_unpack():
    """Every data vector unpacks to its expected value(s)."""
    buf = Buffer()
    for fmt, data, expected in pack_unpack_vectors:
        buf.add(data)
        assert buf.unpack(fmt) == expected
def test_pack_uuid():
    """A UUID built from the vector bytes packs back to those bytes."""
    uuid = UUID.from_bytes(uuid_vector)
    assert Buffer.pack_uuid(uuid) == uuid_vector
def test_unpack_string():
    """A length-prefixed string payload decodes to the original text."""
    buf = Buffer()
    buf.add(b"\x04spam")
    result = buf.unpack_string()
    assert result == "spam"
def test_pack_entity_metadata():
    """Every metadata vector packs to its expected wire bytes."""
    for value, expected in entity_metadata_vectors:
        assert Buffer.pack_entity_metadata(value) == expected
def to_bytes(self):
    """Serialize as an unsigned-short length prefix plus UTF-8 bytes."""
    payload = self.value.encode('utf8')
    return Buffer.pack('H', len(payload)) + payload