def send_packet(self, name, *data):
    """Sends a packet to the remote.

    :param name: packet name, looked up in ``packets.packet_idents``
    :param data: byte strings making up the packet payload
    :raises ProtocolError: if no ident is known for the packet
    """
    if self.closed:
        return

    self.log_packet("# send", name)

    data = b"".join(data)

    # Prepend ident. Use self.buff_type consistently (the original mixed
    # Buffer and self.buff_type) so version-specific packing applies.
    key = (self.protocol_version, self.protocol_mode, self.send_direction,
           name)
    try:
        ident = packets.packet_idents[key]
    except KeyError:
        raise ProtocolError("No ID known for packet: %s" % (key, ))
    data = self.buff_type.pack_varint(ident) + data

    if self.compression_enabled:
        # Compress data and prepend uncompressed data length; payloads
        # below the threshold are sent uncompressed with a 0 marker.
        if len(data) >= self.compression_threshold:
            data = self.buff_type.pack_varint(len(data)) + \
                zlib.compress(data)
        else:
            data = self.buff_type.pack_varint(0) + data

    # Prepend packet length ("play" mode permits longer packets)
    max_bits = 32 if self.protocol_mode == "play" else 21
    data = self.buff_type.pack_varint(len(data), max_bits=max_bits) + data

    # Encrypt
    data = self.cipher.encrypt(data)

    # Send
    self.transport.write(data)
def to_bytes(self):
    """Serialize as: element type id ('b'), element count ('i'), elements."""
    if self.value:
        element_type = _ids[type(self.value[0])]
    else:
        # Empty lists are tagged as holding bytes (type id 1) by convention.
        element_type = 1
    chunks = [tag.to_bytes() for tag in self.value]
    return (Buffer.pack('b', element_type)
            + Buffer.pack('i', len(self.value))
            + b"".join(chunks))
def data_received(self, data):
    """Handles bytes arriving from the remote: decrypts, buffers, and
    dispatches any complete packets to ``packet_received``.

    :param data: raw (possibly encrypted) bytes from the transport
    """
    # Decrypt data
    data = self.cipher.decrypt(data)

    # Add it to our buffer
    self.recv_buff.add(data)

    # Read some packets
    while not self.closed:
        # Save the buffer, in case we read an incomplete packet
        self.recv_buff.save()

        # Try to read a packet
        try:
            max_bits = 32 if self.protocol_mode == "play" else 21
            packet_length = self.recv_buff.unpack_varint(max_bits=max_bits)
            packet_body = self.recv_buff.read(packet_length)

        # Incomplete packet read, restore the buffer.
        except BufferUnderrun:
            self.recv_buff.restore()
            break

        # Load the packet body into a buffer
        packet_buff = self.buff_type()
        packet_buff.add(packet_body)

        try:  # Catch protocol errors
            try:  # Catch buffer overrun/underrun
                if self.compression_enabled:
                    uncompressed_length = packet_buff.unpack_varint()

                    if uncompressed_length > 0:
                        data = zlib.decompress(packet_buff.read())
                        # Use self.buff_type (not plain Buffer) so
                        # version-specific buffer behavior is kept for
                        # compressed packets too.
                        packet_buff = self.buff_type()
                        packet_buff.add(data)

                ident = packet_buff.unpack_varint()
                key = (self.protocol_version, self.protocol_mode,
                       self.recv_direction, ident)
                try:
                    name = packets.packet_names[key]
                except KeyError:
                    raise ProtocolError("No name known for packet: %s"
                                        % (key, ))
                self.packet_received(packet_buff, name)

            except BufferUnderrun:
                raise ProtocolError("Packet is too short!")

            if len(packet_buff) > 0:
                raise ProtocolError("Packet is too long!")

        except ProtocolError as e:
            self.protocol_error(e)
            break

        # We've read a complete packet, so reset the inactivity timeout
        self.connection_timer.restart()
def test_save_discard_restore():
    """Saved buffer contents survive a discard and return on restore."""
    buf = Buffer()
    buf.add(b"spam")
    buf.save()
    assert len(buf) == 4
    buf.discard()
    assert len(buf) == 0
    buf.restore()
    assert len(buf) == 4
def test_read():
    """Reads consume the buffer; reading past the end raises BufferUnderrun."""
    buf = Buffer()
    buf.add(b"spam")
    assert buf.read(2) == b"sp"
    assert buf.read(2) == b"am"
    buf.add(b"eggs")
    # A read with no length argument drains the buffer.
    assert buf.read() == b"eggs"
    with pytest.raises(BufferUnderrun):
        buf.read(1)
def to_bytes(self):
    """Serialize the compound: (type id, name, payload) per entry,
    terminated by a TAG_End byte for non-root or empty compounds."""
    parts = []
    for name, tag in self.value.items():
        parts.append(Buffer.pack('b', _ids[type(tag)]))
        parts.append(TagString(name).to_bytes())
        parts.append(tag.to_bytes())
    if not self.value or not self.root:
        # TAG_End marker
        parts.append(Buffer.pack('b', 0))
    return b"".join(parts)
def save_chunk(self, chunk):
    """
    Saves the given chunk, which should be a ``TagRoot``, to the region
    file.
    """
    # Extract the chunk's co-ordinates, then compress its serialized form.
    chunk_x = chunk.body.value["Level"].value["xPos"].value
    chunk_z = chunk.body.value["Level"].value["zPos"].value
    chunk = zlib.compress(chunk.to_bytes())
    chunk = Buffer.pack('IB', len(chunk), 2) + chunk
    # Size of the chunk in 4 KiB sectors, rounded up.
    chunk_length = 1 + (len(chunk) - 1) // 4096

    # Collect (offset, length) extents of every other chunk from the
    # header; the header itself occupies the first two sectors.
    extents = [(0, 2)]
    self.fd.seek(0)
    buff = Buffer(self.fd.read(4096))
    for i in range(1024):
        z, x = divmod(i, 32)
        entry = buff.unpack('I')
        offset, length = entry >> 8, entry & 0xFF
        if offset > 0 and not (x == chunk_x and z == chunk_z):
            extents.append((offset, length))
    extents.sort()
    # Sentinel extent past the end so the gap search always succeeds.
    extents.append((extents[-1][0] + extents[-1][1] + chunk_length, 0))

    # Find the first gap between extents big enough for this chunk.
    for i in range(len(extents) - 1):
        start = extents[i][0] + extents[i][1]
        end = extents[i + 1][0]
        if (end - start) >= chunk_length:
            chunk_offset = start
            extents.insert(i + 1, (chunk_offset, chunk_length))
            break

    # Write extent header
    self.fd.seek(4 * (32 * chunk_z + chunk_x))
    self.fd.write(Buffer.pack(
        'I', (chunk_offset << 8) | (chunk_length & 0xFF)))

    # Write timestamp header
    self.fd.seek(4096 + 4 * (32 * chunk_z + chunk_x))
    self.fd.write(Buffer.pack('I', int(time.time())))

    # Write chunk
    self.fd.seek(4096 * chunk_offset)
    self.fd.write(chunk)

    # Truncate file just past the last occupied extent.
    self.fd.seek(4096 * extents[-1][0])
    self.fd.truncate()
def test_chunk_pack_unpack():
    """A chunk section round-trips: unpack then pack gives the same bytes."""
    with open(chunk_path, "rb") as fd:
        original = fd.read()
    buf = Buffer(original)
    blocks, block_lights, sky_lights = buf.unpack_chunk_section()
    repacked = Buffer.pack_chunk_section(blocks, block_lights, sky_lights)
    assert len(buf) == 0
    assert blocks[1400:1410] == [32, 32, 32, 288, 275, 288, 288, 497, 497, 0]
    assert block_lights[1400:1410] == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
    assert sky_lights[1400:1410] == [0, 0, 0, 13, 0, 12, 11, 10, 14, 15]
    assert original == repacked
def to_bytes(self):
    """Serialize: first element's type id and count ('bi'), then elements."""
    # An empty list borrows TagByte's type id via a throwaway instance.
    head = self.value[0] if self.value else TagByte(0)
    body = b"".join(tag.to_bytes() for tag in self.value)
    return Buffer.pack('bi', _ids[type(head)], len(self.value)) + body
def __init__(self, name: str, contributors: list, environment: dict,
             folder: str, version: str, viewpoints: list):
    """Builds a scene description from recorded packet dumps.

    :param name: display name of the scene
    :param contributors: list of contributor entries
    :param environment: settings dict; keys 'time', 'dimension',
        'weather' and 'cycle' are read with defaults
    :param folder: sub-folder under ./packets holding the dumps
    :param version: protocol version sub-folder
    :param viewpoints: "x,y,z[,yaw[,pitch]]" strings
    """
    self.name = name
    self.contributors = contributors
    self.time = environment.get('time', 0)
    self.dimension = environment.get('dimension', 'Overworld')
    self.weather = environment.get('weather', 'clear')
    self.cycle = environment.get('cycle', False)
    self.packets = list()
    self.viewpoints = list()

    # Load recorded packet dumps for this scene/version.
    path = os.path.join(os.getcwd(), './packets', folder, version, '*.bin')
    for filename in sorted(glob.glob(path)):
        # Filenames look like "<id>_dn_<type>[_suffix].bin". The dot is
        # escaped (the original regex let '.' match any character).
        packet_type = re.match(r'(\d+)_dn_([a-z_]+)(_[\w-]*)?\.bin',
                               os.path.basename(filename))
        # Context manager closes the handle even if parsing raises
        # (the original leaked the handle on error).
        with open(filename, 'rb') as file:
            self.packets.append({
                "id": packet_type.group(1),
                "type": packet_type.group(2),
                "packet": Buffer(file.read()).read(),
            })

    # Parse viewpoints; missing components default to 0.
    for viewpoint in viewpoints:
        parts = [0, 0, 0, 0, 0]
        for i, part in enumerate(viewpoint.split(',')):
            parts[i] = part
        self.viewpoints.append({
            "x": float(parts[0]),
            "y": float(parts[1]),
            "z": float(parts[2]),
            "yaw": float(parts[3]),
            "yaw_256": int((float(parts[3]) / 360) * 256),
            "pitch": int(float(parts[4])),
        })

    # Fall back to a single origin viewpoint if none were supplied.
    if len(self.viewpoints) == 0:
        self.viewpoints.append({
            "x": 0.0,
            "y": 0.0,
            "z": 0.0,
            "yaw": 0,
            "yaw_256": 0,
            "pitch": 0,
        })
def load_chunk(self, chunk_x, chunk_z):
    """
    Loads the chunk at the given co-ordinates from the region file.
    The co-ordinates should range from 0 to 31. Returns a ``TagRoot``.

    :raises ValueError: if the chunk is not present in the region file
    """
    buff = Buffer()

    # Read extent header
    self.fd.seek(4 * (32 * chunk_z + chunk_x))
    buff.add(self.fd.read(4))
    entry = buff.unpack('I')
    chunk_offset, chunk_length = entry >> 8, entry & 0xFF

    # An offset of zero means the chunk was never written; without this
    # guard we would seek to the header and decompress garbage.
    if chunk_offset == 0:
        raise ValueError((chunk_x, chunk_z))

    # Read chunk
    self.fd.seek(4096 * chunk_offset)
    buff.add(self.fd.read(4096 * chunk_length))
    chunk = buff.read(buff.unpack('IB')[0])
    chunk = zlib.decompress(chunk)
    chunk = TagRoot.from_bytes(chunk)
    return chunk
def save_chunk(self, chunk):
    """
    Saves the given chunk, which should be a ``TagRoot``, to the region
    file.
    """
    # Extract co-ordinates, then compress the serialized chunk.
    chunk_x = chunk.body.value["Level"].value["xPos"].value
    chunk_z = chunk.body.value["Level"].value["zPos"].value
    chunk = zlib.compress(chunk.to_bytes())
    chunk = Buffer.pack('IB', len(chunk), 2) + chunk
    # Chunk size in 4 KiB sectors, rounded up.
    chunk_length = 1 + (len(chunk) - 1) // 4096

    # Gather extents of every other chunk from the header; the header
    # itself takes the first two sectors.
    extents = [(0, 2)]
    self.fd.seek(0)
    header = Buffer(self.fd.read(4096))
    for index in range(1024):
        z, x = divmod(index, 32)
        entry = header.unpack('I')
        offset, length = entry >> 8, entry & 0xFF
        if offset > 0 and not (x == chunk_x and z == chunk_z):
            extents.append((offset, length))
    extents.sort()
    # Sentinel past the end guarantees the gap search finds a slot.
    extents.append((extents[-1][0] + extents[-1][1] + chunk_length, 0))

    # Pick the first inter-extent gap large enough for this chunk.
    for index in range(len(extents) - 1):
        gap_start = extents[index][0] + extents[index][1]
        gap_end = extents[index + 1][0]
        if gap_end - gap_start >= chunk_length:
            chunk_offset = gap_start
            extents.insert(index + 1, (chunk_offset, chunk_length))
            break

    # Write extent header
    self.fd.seek(4 * (32 * chunk_z + chunk_x))
    self.fd.write(
        Buffer.pack('I', (chunk_offset << 8) | (chunk_length & 0xFF)))

    # Write timestamp header
    self.fd.seek(4096 + 4 * (32 * chunk_z + chunk_x))
    self.fd.write(Buffer.pack('I', int(time.time())))

    # Write chunk
    self.fd.seek(4096 * chunk_offset)
    self.fd.write(chunk)

    # Truncate file just past the last occupied extent.
    self.fd.seek(4096 * extents[-1][0])
    self.fd.truncate()
def load_chunk(self, chunk_x, chunk_z):
    """
    Loads the chunk at the given co-ordinates from the region file.
    The co-ordinates should range from 0 to 31. Returns a ``TagRoot``.
    """
    buff = Buffer()

    # Read this chunk's extent entry from the header.
    self.fd.seek(4 * (32 * chunk_z + chunk_x))
    buff.add(self.fd.read(4))
    entry = buff.unpack('I')
    chunk_offset, chunk_length = entry >> 8, entry & 0xFF

    # Offset zero means the chunk was never written.
    if chunk_offset == 0:
        raise ValueError((chunk_x, chunk_z))

    # Read the chunk's sectors, then decompress and parse the payload.
    self.fd.seek(4096 * chunk_offset)
    buff.add(self.fd.read(4096 * chunk_length))
    payload = buff.read(buff.unpack('IB')[0])
    return TagRoot.from_bytes(zlib.decompress(payload))
def test_pack_entity_metadata():
    """Each metadata vector packs to its expected byte string."""
    for metadata, expected in entity_metadata_vectors:
        assert Buffer.pack_entity_metadata(metadata) == expected
def to_bytes(self):
    """Serialize this tag's payload as a length-prefixed JSON string."""
    encoded = Buffer.pack_json(self.value)
    return encoded
def test_pack_uuid():
    """Packing a UUID built from the vector reproduces the vector bytes."""
    packed = Buffer.pack_uuid(UUID.from_bytes(uuid_vector))
    assert packed == uuid_vector
def test_pack_slot():
    """Each slot vector packs to its expected byte string."""
    for kwargs, expected in slot_vectors:
        assert Buffer.pack_slot(**kwargs) == expected
def test_pack_chat():
    """A plain string packs to a length-prefixed chat JSON object."""
    packed = Buffer.pack_chat("spam")
    assert packed == b'\x10{"text": "spam"}'
def test_pack_varint():
    """Each varint vector packs to its expected byte string."""
    for number, expected in varint_vectors:
        assert Buffer.pack_varint(number) == expected
def to_bytes(self):
    """Serialize as an element count ('i') followed by the packed array."""
    count = Buffer.pack('i', len(self.value))
    payload = Buffer.pack_array(self.fmt, self.value)
    return count + payload
def test_unpack_json():
    """A length-prefixed JSON payload unpacks to the matching dict."""
    buf = Buffer()
    buf.add(b'\x10{"spam": "eggs"}')
    assert buf.unpack_json() == {"spam": "eggs"}
def to_bytes(self):
    """Serialize as UTF-8 bytes prefixed by an unsigned-short length."""
    encoded = self.value.encode('utf8')
    return Buffer.pack('H', len(encoded)) + encoded
def to_bytes(self):
    """Serialize as an element count ('i') followed by the packed array."""
    parts = [Buffer.pack('i', len(self.value)),
             Buffer.pack_array(self.fmt, self.value)]
    return b"".join(parts)
def test_unpack_uuid():
    """A UUID unpacked from the vector serializes back to the same bytes."""
    buf = Buffer()
    buf.add(uuid_vector)
    assert buf.unpack_uuid().to_bytes() == uuid_vector
def test_unpack_chat():
    """Chat payloads in list, extra, and translate forms render correctly."""
    buf = Buffer()
    buf.add(b'\x11["spam", " eggs"]')
    assert buf.unpack_chat().to_string() == "spam eggs"
    buf.add(b'\x22{"text": "spam", "extra": " eggs"}')
    assert buf.unpack_chat().to_string() == "spam eggs"
    buf.add(b'\x14{"translate": "foo"}')
    assert buf.unpack_chat().to_string() == "foo"
    buf.add(b'\x2E{"translate": "foo", "with": ["spam", "eggs"]}')
    assert buf.unpack_chat().to_string() == "foo{spam, eggs}"
def to_bytes(self):
    """Serialize the scalar payload using this tag's struct format."""
    packed = Buffer.pack(self.fmt, self.value)
    return packed
def test_unpack_entity_metadata():
    """Each metadata byte string unpacks to its value, draining the buffer."""
    buf = Buffer()
    for expected, raw in entity_metadata_vectors:
        buf.add(raw)
        assert buf.unpack_entity_metadata() == expected
        assert len(buf) == 0
def test_unpack_string():
    """A length-prefixed string payload unpacks to the original text."""
    buf = Buffer()
    buf.add(b"\x04spam")
    assert buf.unpack_string() == "spam"
def from_bytes(cls, bytes):
    """Alternate constructor: wrap *bytes* in a Buffer and parse it."""
    buff = Buffer(bytes)
    return cls.from_buff(buff)
def test_pack():
    """Each format/value vector packs to its expected byte string."""
    for fmt, expected, values in pack_unpack_vectors:
        # Single values are normalized to a 1-tuple before unpacking.
        if not isinstance(values, tuple):
            values = (values, )
        assert Buffer.pack(fmt, *values) == expected
def test_pack_string():
    """A short string packs to a varint length prefix plus its bytes."""
    packed = Buffer.pack_string("spam")
    assert packed == b"\x04spam"
def test_unpack_slot():
    """Each slot byte string unpacks to its value, draining the buffer."""
    buf = Buffer()
    for expected, raw in slot_vectors:
        buf.add(raw)
        assert buf.unpack_slot() == expected
        assert len(buf) == 0
def test_pack_json():
    """A dict packs to a length-prefixed JSON byte string."""
    packed = Buffer.pack_json({"spam": "eggs"})
    assert packed == b'\x10{"spam": "eggs"}'