def test_save():
    """Build a root compound containing one tag of every supported type
    and save it (uncompressed) to __test__.nbt."""
    root = NBTFile(name='')
    root['byte'] = TAG_Byte(0)
    root['short'] = TAG_Short(1)
    root['int'] = TAG_Int(2)
    root['float'] = TAG_Float(3.)
    root['double'] = TAG_Double(4.)
    root['string'] = TAG_String('Testing')
    root['int_array'] = TAG_Int_Array([45, 5, 6])
    root['byte_array'] = TAG_Byte_Array([4, 3, 2])
    root['long_array'] = TAG_Long_Array([5, 6, 7])
    # Explicit TAG_Int members.
    root['list'] = TAG_List(TAG_Int, [TAG_Int(4)])
    # Plain ints / dicts are auto-wrapped by TAG_List.
    root['autolist_int'] = TAG_List(TAG_Int, [5, 6, 7, 30240, -340])
    root['autolist_compound'] = TAG_List(TAG_Compound, [{
        'name': TAG_String('ABC'),
        'health': TAG_Double(3.5),
    }])
    with open('__test__.nbt', 'wb') as out:
        root.save(out)
def set_spawn(self, spawn):
    """Rewrite the world spawn point in level.dat.

    spawn is an (x, z) pair; SpawnX gets spawn[0] and SpawnZ spawn[1].
    """
    with gzip.open(self.leveldat, 'rb') as src:
        level = NBTFile(src)
        data = level['Data']
        data['SpawnX'].value = spawn[0]
        data['SpawnZ'].value = spawn[1]
        # Write the modified tree back, gzip-compressed as before.
        with gzip.open(self.leveldat, 'wb') as dst:
            level.save(dst)
def read_nbtdata(self, compressed=True):
    """Read a length-prefixed NBT blob from the stream.

    A length prefix of -1 means "no data"; returns None in that case,
    otherwise the parsed NBTFile (gzip-decompressed when *compressed*).
    """
    length = self.read_short()
    if length == -1:
        return None
    raw = BytesIO(self.read_bytearray(length=length))
    if compressed:
        return NBTFile(raw, compression=NBTFile.Compression.GZIP)
    return NBTFile(raw)
def readEntityMetadata(FileObject):
    """Parse an entity-metadata stream until the 0x7F terminator byte.

    Each entry byte packs an index (low 5 bits) and a type id (high
    3 bits).  Returns a dict mapping index -> (type_id, value).

    Raises:
        ValueError: on an unrecognised type id.  (The original fell
            through a chain of independent ``if`` tests and died with a
            NameError on the unbound ``val`` instead.)
    """
    metadata = {}
    byte = readUnsignedByte(FileObject)
    while byte != 127:  # 0x7F terminates the metadata list
        index = byte & 0x1F  # Lower 5 bits
        ty = byte >> 5       # Upper 3 bits
        if ty == 0:
            val = readByte(FileObject)
        elif ty == 1:
            val = readShort(FileObject)
        elif ty == 2:
            val = readInt(FileObject)
        elif ty == 3:
            val = readFloat(FileObject)
        elif ty == 4:
            val = readString(FileObject)
        elif ty == 5:
            # Item slot: id, then count/damage and optional gzipped NBT
            # payload when id != -1 and the NBT length prefix != -1.
            val = {'id': readShort(FileObject)}
            if val['id'] != -1:
                val['count'] = readByte(FileObject)
                val['damage'] = readShort(FileObject)
                nbtDataLength = readShort(FileObject)
                if nbtDataLength != -1:
                    val['NBT'] = NBTFile(
                        BytesIO(readByteArray(FileObject, nbtDataLength)),
                        compression=NBTFile.Compression.GZIP)
        elif ty == 6:
            # Three consecutive ints (a coordinate triple).
            val = [readInt(FileObject) for _ in range(3)]
        else:
            raise ValueError('unknown entity metadata type: %d' % ty)
        metadata[index] = (ty, val)
        byte = readUnsignedByte(FileObject)
    return metadata
def __init__(self, log):
    """Load spawn point and world seed from the world's level.dat.

    NOTE(review): assumes ``self.leveldat`` is supplied elsewhere
    (e.g. a class attribute) — it is not assigned here; confirm.
    """
    self.log = log
    with gzip.open(self.leveldat, 'rb') as fp:
        data = NBTFile(fp)['Data']
        self.spawn = (data['SpawnX'].value, data['SpawnZ'].value)
        self.seed = data['RandomSeed'].value
def map_to_img(nbt_file, img_file, version=DEFAULT_VERSION, warn=False):
    """Decode a gzipped map-item NBT stream and render its colour data.

    Width/height are forwarded only when present in the map's 'data'
    compound; otherwise None is passed and the renderer decides.
    """
    nbt = NBTFile(io=GzipFile(mode='r', fileobj=nbt_file))
    data = nbt['data']
    width = data['width'].value if 'width' in data else None
    height = data['height'].value if 'height' in data else None
    map_data_to_img(data['colors'].value, img_file, version=version,
                    warn=warn, width=width, height=height)
def test_parse(self):
    """
    Test to ensure PyNBT can parse the defacto-test
    file, "bigtest.nbt".
    """
    nbt = NBTFile(self.io)
    # All 11 top-level tags must have been read.
    self.assertTrue(len(nbt) == 11)
    # Spot-check a deeply nested compound value.
    created = nbt['listTest (compound)'].value[0]['created-on']
    self.assertTrue(created.value == 1264099775885)
def handle84(FileObject):
    """Decode an 0x84 (update tile entity) packet.

    Returns a dict with x/y/z/Action, plus 'NBTData' when the packet
    carries a payload (data length prefix != -1).
    """
    x = DataUtil.readInt(FileObject)
    y = DataUtil.readShort(FileObject)
    z = DataUtil.readInt(FileObject)
    action = DataUtil.readByte(FileObject)
    data_length = DataUtil.readShort(FileObject)
    packet = {'x': x, 'y': y, 'z': z, 'Action': action}
    if data_length != -1:
        raw = DataUtil.readByteArray(FileObject, data_length)
        packet['NBTData'] = NBTFile(BytesIO(raw),
                                    compression=NBTFile.Compression.GZIP)
    return packet
def read_slot(self):
    """Read one inventory slot; a blockID of -1 denotes an empty slot."""
    slot = Slot()
    slot.blockID = self.read_short()
    if slot.blockID == -1:
        return slot
    slot.count = self.read_byte()
    slot.damage = self.read_short()
    data_len = self.read_short()
    # A -1 length means there is no attached NBT payload.
    if data_len != -1:
        raw = self.read_bytearray(length=data_len)
        slot.data = NBTFile(BytesIO(raw),
                            compression=NBTFile.Compression.GZIP)
    return slot
def select_file():
    """Prompt for a level.dat, strip its header into a temp copy, and load it.

    Side effects: sets context.current_file, context.current_temp_file and
    context.nbt_file.  Exits the process if the dialog is cancelled.

    Raises:
        Exception: if the external ./pi-nbt helper fails.
    """
    import os  # local: only needed to close the mkstemp fd

    filename = askopenfilename(
        initialdir="~/.minecraft-pi/games/com.mojang/minecraftWorlds",
        filetypes=[('NBT Files', 'level.dat')])
    # Cancelling the dialog yields '' (or an empty tuple on some Tk
    # builds).  The original `type(...) is str` test let '' through and
    # then tried to open an empty path.
    if not isinstance(filename, str) or not filename:
        exit(0)
    fd, temp_filename = tempfile.mkstemp()
    os.close(fd)  # mkstemp returns an OPEN fd; the original leaked it
    completed_process = subprocess.run(
        ['./pi-nbt', 'remove-header', filename, temp_filename])
    if completed_process.returncode != 0:
        raise Exception('Unable To Prepare Files')
    context.current_file = filename
    context.current_temp_file = temp_filename
    # Pocket-edition level.dat is little-endian, unlike Java-edition NBT.
    with open(temp_filename, 'rb') as io:
        context.nbt_file = NBTFile(io, little_endian=True)
def test_parse():
    """
    Test to ensure PyNBT can parse the defacto-test
    file, "bigtest.nbt".
    """
    with gzip.GzipFile(fileobj=BytesIO(BIG_TEST)) as source:
        nbt = NBTFile(source)
    # All 11 top-level tags must have been read.
    assert len(nbt) == 11
    # Spot-check a deeply nested compound value.
    created = nbt['listTest (compound)'].value[0]['created-on']
    assert created.value == 1264099775885
    # The byte array's contents follow a known formula; verify all 1000.
    byte_tag = nbt['byteArrayTest (the first 1000 values of (n*n*255+n*7)%100,'
                   ' starting with n=0 (0, 62, 34, 16, 8, ...))']
    for n in range(1000):
        assert byte_tag.value[n] == (n * n * 255 + n * 7) % 100
def readSlotData(FileObject):
    """Read a slot structure; a BlockID of -1 denotes an empty slot.

    Returns a dict with BlockID/ItemCount (and Damage, plus 'Data' when
    an NBT payload is attached).
    """
    BlockID = readShort(FileObject)
    if BlockID == -1:
        return {'BlockID': -1, 'ItemCount': 0}
    ItemCount = readByte(FileObject)
    Damage = readShort(FileObject)
    MetadataLength = readShort(FileObject)
    slot = {'BlockID': BlockID, 'ItemCount': ItemCount, 'Damage': Damage}
    # A -1 length means no attached NBT metadata.
    if MetadataLength != -1:
        raw = readByteArray(FileObject, MetadataLength)
        slot['Data'] = NBTFile(BytesIO(raw),
                               compression=NBTFile.Compression.GZIP)
    return slot
def _decode(self, obj, context):
    """Decode a gzipped NBT blob into an NBTFile.

    Uses BytesIO rather than the original StringIO: NBT payloads are
    binary, every other call site in this file wraps them in BytesIO,
    and StringIO rejects bytes under Python 3.
    """
    return NBTFile(BytesIO(obj), compression=NBTFile.Compression.GZIP)
def carto(self, radius, center=(0, 0), resolution=1):
    """Yield per-column map data for all chunks within *radius* blocks
    of *center*.

    Walks every Anvil region (.mca) file overlapping the requested
    square, decodes each relevant 16x16 chunk, and yields one tuple
    (fx, fz, block_id, block_meta, height, biome) per sampled column.
    *resolution* is the sampling stride within a chunk (1 = every
    column).
    """
    # Chunk-coordinate bounds of the requested square (16 blocks/chunk).
    c_lower = tuple((c - radius) // 16 for c in center)
    c_upper = tuple((c + radius) // 16 - 1 for c in center)
    # Number of region files to visit (32x32 chunks per region),
    # used only for progress logging.
    r_count = (1 + c_upper[0] // 32 - c_lower[0] // 32) * (
        1 + c_upper[1] // 32 - c_lower[1] // 32)
    r_index = 1
    # Loop over regions
    for rz in range(c_lower[1] // 32, 1 + c_upper[1] // 32):
        for rx in range(c_lower[0] // 32, 1 + c_upper[0] // 32):
            self.log(
                1, 'generating carto ({0} of {1})'.format(r_index, r_count))
            r_index += 1
            r_file = open(self.region.format(rx, rz), 'rb')
            chunk_offsets = []
            # Read the header .mca header: 32*32 big-endian u32 entries,
            # one per chunk, packing a 4KiB-sector offset in the top
            # 24 bits.
            for cz in range(32):
                for cx in range(32):
                    r_tmp = r_file.read(4)
                    # Ignore chunks we don't care about
                    if c_lower[0] <= cx + rx * 32 <= c_upper[0] and \
                       c_lower[1] <= cz + rz * 32 <= c_upper[1]:
                        r_tmp = struct.unpack('>I', r_tmp)[0]
                        # >> 8 drops the sector-count byte, keeping the
                        # sector offset.
                        chunk_offsets.append((cz, cx, r_tmp >> 8))
            # Loop over 16x16 chunks
            for cz, cx, offset in chunk_offsets:
                # Read: 5-byte chunk header = payload length (i32) +
                # compression type (u8), then the zlib payload.
                r_file.seek(offset * 4096, 0)
                c_size, c_compression = struct.unpack(
                    '>iB', r_file.read(5))
                c_data = r_file.read(c_size)
                # Decompress
                c_data = zlib.decompress(c_data)
                # Load the chunk NBT; sections are indexed by their
                # 16-block-tall Y slice.
                c_nbt = NBTFile(io=BytesIO(c_data))['Level']
                c_heightmap = c_nbt['HeightMap'].value
                c_biomes = c_nbt['Biomes'].value
                c_sections = {s['Y'].value: s for s in c_nbt['Sections']}
                # Loop over 1x1 columns, stepping by *resolution*.
                for pz in range(0, 16, resolution):
                    for px in range(0, 16, resolution):
                        # Absolute world coordinates of this column.
                        fx = rx * 512 + cx * 16 + px
                        fz = rz * 512 + cz * 16 + pz
                        # Topmost occupied block (HeightMap stores the
                        # first air block above it).
                        f_height = c_heightmap[16 * pz + px] - 1
                        f_biome = c_biomes[16 * pz + px]
                        # Which 16-tall section, and Y within it.
                        p_section, p_inner = divmod(f_height, 16)
                        # key for input byte array
                        f_key1 = 256 * p_inner + 16 * pz + px
                        # key info for input nibble array (byte index,
                        # which half)
                        f_key2 = divmod(f_key1, 2)
                        # % 256 normalises possibly-signed byte values.
                        f_data = c_sections[p_section]['Blocks'].value[
                            f_key1] % 256
                        f_meta = c_sections[p_section]['Data'].value[
                            f_key2[0]] % 256
                        # Pick high or low nibble of the packed metadata
                        # byte.
                        f_meta = f_meta >> 4 if f_key2[
                            1] == 0 else f_meta & 0x0F
                        yield fx, fz, f_data, f_meta, f_height, f_biome
            r_file.close()
def main(argv):
    """Pretty-print the NBT file named by argv[1].

    Args:
        argv: argument vector laid out like sys.argv — argv[0] is the
            program name, argv[1] the path of the NBT file to dump.
    """
    # Bug fix: the original accepted *argv* but then read sys.argv[1]
    # directly, silently ignoring its parameter.
    with open(argv[1], 'rb') as fin:
        n = NBTFile(fin)
        print(n.pretty())