def test_io():
    """Verify Buffer construction defaults, sequential reads, and reset()."""
    empty = Buffer()
    assert empty.buf == b""
    assert empty.pos == 0

    buf = Buffer(b"\x69\x00\x01\x02\x03")
    assert buf.read(1) == b"\x69"
    assert buf.read(2) == b"\x00\x01"
    assert buf.read() == b"\x02\x03"

    # reset() should rewind so the full contents are readable again
    buf.reset()
    assert buf.read() == b"\x69\x00\x01\x02\x03"
    buf.reset()
async def fetch_player(self, uuid_: uuid.UUID) -> Player:
    """Return the Player for *uuid_*, loading it from disk or creating a new one.

    Players are cached under ``int(uuid)`` so repeated fetches are cheap.
    """
    try:  # EAFP: a cache hit is the common case
        return self.cache[int(uuid_)]
    except KeyError:
        pass

    file = os.path.join(self.data_dir, f"{uuid_}.dat")  # filename of the player

    if not os.path.isfile(file):  # no saved data, create a new player at world spawn
        level_data = self.server.worlds["minecraft:overworld"].data

        player = Player.new(
            self.server.api.eid(),
            uuid_,
            # BUG FIX: the z coordinate previously reused SpawnX instead of SpawnZ
            (level_data["SpawnX"], level_data["SpawnY"], level_data["SpawnZ"]),
            "minecraft:overworld",
        )

        self.cache[int(player.uuid)] = player
        return player

    async with aiofile.async_open(file, "rb") as player_file:  # load preexisting player
        player = Player(
            # NOTE(review): the create branch uses self.server.api.eid() — confirm which is canonical
            self.server.eid(),
            nbt.TAG_Compound.unpack(Buffer(await player_file.read())),
        )

        # BUG FIX: the cache key must be int(uuid) to match the lookup above;
        # storing under the raw UUID object made this entry unreachable
        self.cache[int(player.uuid)] = player

        return player
def test_values_nantest():
    """Check that the values loaded from nantest.nbt are accurate."""
    path = os.path.join("tests", "sample_data", "nantest.nbt")

    with open(path, "rb") as nbt_file:
        tag = nbt.unpack(Buffer(nbt_file.read()))

    # simple scalar fields
    expected_scalars = {
        "Air": 300,
        "AttackTime": 0,
        "DeathTime": 0,
        "FallDistance": 0.0,
        "Fire": -20,
        "Health": 20,
        "HurtTime": 0,
        "OnGround": 1,
    }
    for name, value in expected_scalars.items():
        assert tag[name].data == value

    assert len(tag["Inventory"]) == 0

    assert len(tag["Motion"]) == 3
    assert [t.data for t in tag["Motion"]] == [0, 0, 0]

    assert len(tag["Pos"]) == 3
    assert tag["Pos"][0].data == 0.0
    assert math.isnan(tag["Pos"][1].data)  # the NaN this fixture exists to exercise
    assert tag["Pos"][2].data == 0.0

    assert len(tag["Rotation"]) == 2
    assert tag["Rotation"][0].data == 164.3999481201172
    assert tag["Rotation"][1].data == -63.150203704833984
def test_bigtest():
    """Load bigtest.nbt and confirm an unpack/pack round trip is lossless."""
    with open(os.path.join("tests", "sample_data", "bigtest.nbt"), "rb") as nbt_file:
        buf = Buffer(nbt_file.read())

    tag = nbt.unpack(buf)

    # re-packing the parsed tag must reproduce the original bytes exactly
    assert tag.pack() == buf.buf
async def load_level_data(self):
    """Load this world's level.dat, or generate fresh level NBT if it's absent."""
    file = os.path.join(self.path, "level.dat")

    if os.path.isfile(file):
        async with aiofile.async_open(file, "rb") as level_data_file:
            return nbt.TAG_Compound.unpack(Buffer(await level_data_file.read()))

    # no level.dat yet — build default level data for this world
    return new_level_nbt(
        (2586, self.server.meta.version, 19133),
        self.name,
        (0, 100, 0),
        self.server.conf["seed"],
    )["Data"]
async def handle_packet(stream: Stream):
    """Read one packet from the stream and dispatch it to registered handlers.

    Returns (continue_, stream); continue_ is False when the connection
    should be closed.
    """
    packet_length = 0

    # Basically an implementation of Buffer.unpack_varint(), except designed
    # to read directly from a StreamReader and also to handle legacy server
    # list ping packets.
    for i in range(5):
        try:
            read = await asyncio.wait_for(stream.read(1), 5)
        except asyncio.TimeoutError:
            logger.debug("Closing due to timeout on read...")
            return False, stream

        if read == b"":
            logger.debug("Closing due to invalid read....")
            return False, stream

        if i == 0 and read == b"\xFE":
            # BUG FIX: Logger.warn is deprecated, use warning()
            logger.warning("Legacy ping attempted, legacy ping is not supported.")
            return False, stream

        b = struct.unpack("B", read)[0]
        packet_length |= (b & 0x7F) << 7 * i

        if not b & 0x80:
            break

    if packet_length & (1 << 31):  # sign-extend a negative 32-bit varint
        packet_length -= 1 << 32

    buf = Buffer(await stream.read(packet_length))

    state = STATES.encode(states.get(stream.remote, 0))
    packet = buf.unpack_packet(state, PACKET_MAP)

    logger.debug(
        f"IN : state:{state:<11} | id:0x{packet.id:02X} | packet:{type(packet).__name__}"
    )

    # BUG FIX: continue_ was unbound at the final return when the handler list
    # was empty or every handler returned an invalid value (NameError).
    continue_ = True

    for handler in pymine_api.packet.PACKET_HANDLERS[state][packet.id]:
        resp_value = await handler(stream, packet)

        try:
            continue_, stream = resp_value
        except (ValueError, TypeError):
            # handler returned something that isn't a (continue_, stream) pair
            logger.warning(
                f"Invalid return from packet handler: {handler.__module__}.{handler.__qualname__}"
            )
            continue

        if not continue_:
            return False, stream

    return continue_, stream
def test_varint(var_int, error_msg):
    """Round-trip a varint through Buffer, or assert the expected pack error."""
    buf = Buffer()

    if not error_msg:
        buf.write(Buffer.pack_varint(var_int))
        assert buf.unpack_varint() == var_int
    else:
        # out-of-range values must raise with the expected message
        with pytest.raises(ValueError) as err:
            buf.write(Buffer.pack_varint(var_int))

        assert error_msg in str(err)
def test_json():
    """Pack a JSON document into a Buffer and verify it unpacks unchanged."""
    with open(os.path.join("tests", "sample_data", "test.json")) as test_file:
        data = json.load(test_file)

    buf = Buffer()
    buf.write(Buffer.pack_json(data))

    unpacked = buf.unpack_json()
    for key, value in unpacked.items():
        assert key in data
        assert data[key] == value
def test_varint():
    """Pack several varints and check they unpack back in order."""
    samples = (0, 1, 3749146)

    buf = Buffer()
    for value in samples:
        buf.write(Buffer.pack_varint(value))

    for value in samples:
        assert buf.unpack_varint() == value
def fetch_chunk(cls, world_path: str, chunk_x: int, chunk_z: int) -> Chunk:
    """Fetch a chunk via chunkio and decode its zlib-compressed NBT payload."""
    chunk_data, timestamp = chunkio.fetchChunk(world_path, chunk_x, chunk_z)

    # chunkio returns the raw bytes as a str of code points; rebuild the
    # byte string one character at a time before decompressing
    raw = b"".join(ord(c).to_bytes(1, "big") for c in chunk_data)
    chunk_nbt = nbt.TAG_Compound.unpack(Buffer(zlib.decompress(raw)))

    return Chunk(chunk_nbt, int(timestamp))
def test_basic():
    """Pack a few primitive struct values; verify raw bytes and round trip."""
    packed = (
        Buffer.pack("i", 123)
        + Buffer.pack("b", 1)
        + Buffer.pack("?", True)
        + Buffer.pack("q", 1234567890456)
    )

    buf = Buffer()
    buf.write(packed)

    # exact wire representation
    assert buf.buf == b"\x00\x00\x00{\x01\x01\x00\x00\x01\x1fq\xfb\x06\x18"

    assert buf.unpack("i") == 123
    assert buf.unpack("b") == 1
    assert buf.unpack("?") is True
    assert buf.unpack("q") == 1234567890456
def test_optional_varint():
    """Round-trip optional varints, including the None (absent) case."""
    values = (1, 2, None, 3)

    buf = Buffer()
    for value in values:
        buf.write(Buffer.pack_optional_varint(value))

    for value in values:
        if value is None:
            assert buf.unpack_optional_varint() is None
        else:
            assert buf.unpack_optional_varint() == value
def test_string():
    """Round-trip a mix of empty, short, control-char, and non-ASCII strings."""
    samples = (
        "",
        "",
        "2",
        "adkfj;adkfa;ldkfj\x01af\t\n\n00;\xc3\x85\xc3\x84\xc3\x96",
        "",
        "BrUh",
        "",
    )

    buf = Buffer()
    for text in samples:
        buf.write(Buffer.pack_string(text))

    for text in samples:
        assert buf.unpack_string() == text
def fetch_chunk(cls, world_path: str, chunk_x: int, chunk_z: int) -> Chunk:
    """Read one chunk out of its Anvil region (.mca) file.

    Raises FileNotFoundError if the region file does not exist.
    """
    # region coordinates: each region holds 32x32 chunks
    rx, ry = chunk_x // 32, chunk_z // 32

    region_path = os.path.join(world_path, "region", f"r.{rx}.{ry}.mca")

    if not os.path.isfile(region_path):
        raise FileNotFoundError(region_path)

    loc_table_loc = cls.calc_offset(chunk_x, chunk_z)

    with open(region_path, "rb") as region_file:
        region_file.seek(loc_table_loc)
        offset, length = cls.find_chunk(region_file.read(4))

        # the timestamp table sits 4 KiB after the location table
        region_file.seek(loc_table_loc + 4096)
        # BUG FIX: struct.unpack returns a tuple — take the single int out of
        # it instead of passing the tuple through as the timestamp
        timestamp = struct.unpack(">i", region_file.read(4))[0]

        region_file.seek(offset + 5)  # skip the 5-byte chunk header

        return Chunk(
            nbt.TAG_Compound.unpack(
                Buffer(zlib.decompress(region_file.read(length - 5)))
            ),
            timestamp,
        )
def encode(self) -> bytes:
    """Encode this chunk-data packet body: position, full flag, section
    bitmask, heightmaps, biomes (full chunks only), sections, block entities."""
    out = Buffer.pack("i", self.chunk.x) + Buffer.pack("i", self.chunk.z) + Buffer.pack("?", self.full)

    mask = 0
    chunk_sections_buffer = Buffer()

    for y, section in self.chunk.sections.items():  # pack chunk columns into buffer and generate a bitmask
        if y >= 0:
            mask |= 1 << y
            chunk_sections_buffer.write(Buffer.pack_chunk_section(section))

    out += Buffer.pack_varint(mask) + Buffer.pack_nbt(
        nbt.TAG_Compound(
            "",
            [
                self.chunk["Heightmaps"]["MOTION_BLOCKING"],
                self.chunk["Heightmaps"]["WORLD_SURFACE"],
            ],
        )
    )

    if self.full:
        # BUG FIX: biomes live on this chunk instance; indexing the Chunk
        # class itself (Chunk["Biomes"]) raised a TypeError
        biomes = self.chunk["Biomes"]
        out += Buffer.pack_varint(len(biomes)) + b"".join(Buffer.pack_varint(n) for n in biomes)

    # BUG FIX: the section data length must be serialized as a varint; the
    # bare int couldn't be concatenated with bytes
    out += Buffer.pack_varint(len(chunk_sections_buffer.buf)) + chunk_sections_buffer.read()

    # here we would pack the block entities, but we don't support them yet so
    # we just send an array with length of 0
    out += Buffer.pack_varint(0)

    return out
def decode(self, buf: Buffer) -> PlayPluginMessageServerBound:
    """Decode a serverbound plugin message: channel string + raw payload."""
    channel = buf.unpack_string()
    payload = Buffer(buf.read())  # everything after the channel is opaque data

    return PlayPluginMessageServerBound(channel, payload)
import copy
import os

from pymine.types.buffer import Buffer
import pymine.types.nbt as nbt

# Load the default dimension codec NBT once at import time.
with open(os.path.join("pymine", "data", "default_nbt", "dimension_codec.nbt"), "rb") as dim_codec_file:
    DEFAULT_DIM_CODEC_NBT = nbt.unpack(Buffer(dim_codec_file.read()), root_is_full=False)


def new_dim_codec_nbt() -> nbt.TAG_Compound:
    """Return a fresh deep copy of the default dimension codec NBT."""
    return copy.deepcopy(DEFAULT_DIM_CODEC_NBT)


def get_dimension_data(dimension: str) -> nbt.TAG_Compound:
    """Find a dimension's element data by name; the namespace may be omitted."""
    dims = DEFAULT_DIM_CODEC_NBT["minecraft:dimension_type"]["value"]

    for dim in dims:
        # endswith() so callers may leave off the namespace (like "minecraft:")
        if dim["name"].data.endswith(dimension):
            return dim["element"]


# def new_dim_codec_dim_props(
#     piglin_safe: int,
#     natural: int,
#     ambient_light: float,
#     infiniburn: str,
async def handle_packet(self, stream: Stream):
    """Read, decode, and dispatch one inbound packet; called in a loop."""
    packet_length = 0

    # Essentially Buffer.unpack_varint(), but reading straight from the
    # StreamReader so legacy server list ping packets can also be detected.
    for shift in range(3):
        try:
            byte = await asyncio.wait_for(stream.read(1), 30)
        except asyncio.TimeoutError:
            self.console.debug("Closing due to timeout on read...")
            raise StopHandling

        if byte == b"":
            self.console.debug("Closing due to invalid read....")
            raise StopHandling

        if shift == 0 and byte == b"\xFE":
            self.console.warn("Legacy ping attempted, legacy ping is not supported.")
            raise StopHandling

        value = struct.unpack(">B", byte)[0]
        packet_length |= (value & 0x7F) << 7 * shift

        if not value & 0x80:
            break

    if packet_length & (1 << 31):  # sign-extend a negative 32-bit value
        packet_length -= 1 << 32

    buf = Buffer(await stream.read(packet_length))
    state = self.cache.states.get(stream.remote, 0)

    try:
        packet = buf.unpack_packet(state, PACKET_MAP)
    except InvalidPacketID:
        self.console.warn("Invalid packet ID received.")
        return stream

    self.console.debug(f"IN : state: {state} | id:0x{packet.id:02X} | packet:{type(packet).__name__}")

    handlers = self.api.register._on_packet[state].get(packet.id)

    if handlers is None:
        self.console.warn(f"No packet handler found for packet: 0x{packet.id:02X} {type(packet).__name__}")
        return stream

    for handler in handlers.values():
        try:
            result = await handler(stream, packet)

            # a handler may hand back a replacement stream
            if isinstance(result, Stream):
                stream = result
        except StopHandling:
            raise
        except BaseException as e:
            self.console.error(
                f"Error occurred in {handler.__module__}.{handler.__qualname__}: {self.console.f_traceback(e)}"
            )

    return stream