Example No. 1
    def findTrueSpawn(self):
        """Adds the true spawn location to self.POI.  The spawn Y coordinate
        is almost always the default of 64.  Find the first air block above
        that point for the true spawn location"""

        ## read spawn info from level.dat
        data = nbt.load(os.path.join(self.worlddir, "level.dat"))[1]
        spawnX = data['Data']['SpawnX']
        spawnY = data['Data']['SpawnY']
        spawnZ = data['Data']['SpawnZ']
   
        ## The chunk that holds the spawn location 
        chunkX = spawnX/16
        chunkY = spawnZ/16

        ## The filename of this chunk
        chunkFile = self.get_chunk_path(chunkX, chunkY)

        data = nbt.load(chunkFile)[1]
        level = data['Level']
        blockArray = numpy.frombuffer(level['Blocks'], dtype=numpy.uint8).reshape((16,16,128))

        ## The block for spawn *within* the chunk
        inChunkX = spawnX - (chunkX*16)
        inChunkZ = spawnZ - (chunkY*16)

        ## find the first air block
        while (blockArray[inChunkX, inChunkZ, spawnY] != 0):
            spawnY += 1


        self.POI.append( dict(x=spawnX, y=spawnY, z=spawnZ, 
                msg="Spawn", type="spawn", chunk=(inChunkX,inChunkZ)))
Example No. 2
    def findTrueSpawn(self):
        """Adds the true spawn location to self.POI.  The spawn Y coordinate
        is almost always the default of 64.  Find the first air block above
        that point for the true spawn location"""

        ## read spawn info from level.dat
        data = nbt.load(os.path.join(self.worlddir, "level.dat"))[1]
        spawnX = data["Data"]["SpawnX"]
        spawnY = data["Data"]["SpawnY"]
        spawnZ = data["Data"]["SpawnZ"]

        ## The chunk that holds the spawn location
        chunkX = spawnX / 16
        chunkY = spawnZ / 16

        ## The filename of this chunk
        chunkFile = os.path.join(
            base36encode(chunkX % 64),
            base36encode(chunkY % 64),
            "c.%s.%s.dat" % (base36encode(chunkX), base36encode(chunkY)),
        )

        data = nbt.load(os.path.join(self.worlddir, chunkFile))[1]
        level = data["Level"]
        blockArray = numpy.frombuffer(level["Blocks"], dtype=numpy.uint8).reshape((16, 16, 128))

        ## The block for spawn *within* the chunk
        inChunkX = spawnX - (chunkX * 16)
        inChunkZ = spawnZ - (chunkY * 16)

        ## find the first air block
        while blockArray[inChunkX, inChunkZ, spawnY] != 0:
            spawnY += 1

        self.POI.append(dict(x=spawnX, y=spawnY, z=spawnZ, msg="Spawn"))
Example No. 3
    def __init__(self, filename, create=False):
        self.zipfilename = filename

        tempdir = tempfile.mktemp("schematic")
        if create is False:
            zf = zipfile.ZipFile(filename)
            zf.extractall(tempdir)
            zf.close()

        super(ZipSchematic, self).__init__(tempdir, create)
        atexit.register(shutil.rmtree, self.worldFolder.filename, True)

        try:
            schematicDat = nbt.load(self.worldFolder.getFilePath("schematic.dat"))

            self.Width = schematicDat['Width'].value
            self.Height = schematicDat['Height'].value
            self.Length = schematicDat['Length'].value

            if "Materials" in schematicDat:
                self.materials = namedMaterials[schematicDat["Materials"].value]

        except Exception, e:
            print "Exception reading schematic.dat, skipping: {0!r}".format(e)
            self.Width = 0
            self.Length = 0
Example No. 4
    def findTrueSpawn(self):
        """Adds the true spawn location to self.POI.  The spawn Y coordinate
        is almost always the default of 64.  Find the first air block above
        that point for the true spawn location"""

        ## read spawn info from level.dat
        data = nbt.load(os.path.join(self.worlddir, "level.dat"))[1]
        spawnX = data["Data"]["SpawnX"]
        spawnY = data["Data"]["SpawnY"]
        spawnZ = data["Data"]["SpawnZ"]

        ## The chunk that holds the spawn location
        chunkX = spawnX / 16
        chunkY = spawnZ / 16

        ## The filename of this chunk
        chunkFile = self.get_region_path(chunkX, chunkY)

        data = nbt.load_from_region(chunkFile, chunkX, chunkY)[1]
        level = data["Level"]
        blockArray = numpy.frombuffer(level["Blocks"], dtype=numpy.uint8).reshape((16, 16, 128))

        ## The block for spawn *within* the chunk
        inChunkX = spawnX - (chunkX * 16)
        inChunkZ = spawnZ - (chunkY * 16)

        ## find the first air block
        while blockArray[inChunkX, inChunkZ, spawnY] != 0:
            spawnY += 1
            if spawnY == 128:
                break

        self.POI.append(dict(x=spawnX, y=spawnY, z=spawnZ, msg="Spawn", type="spawn", chunk=(inChunkX, inChunkZ)))
        self.spawn = (spawnX, spawnY, spawnZ)
Example No. 5
    def load(_data):
        if DEBUG_PE:
            global longest_complist_len
            global longest_complist
            global shortest_complist_len
            global shortest_complist

        sep = "\x00\x00\x00\x00\n"
        sep_data = _data.split(sep)
        compounds = []
        for d in sep_data:
            if len(d) != 0:
                if not d.startswith("\n"):
                    d = "\n" + d
                if DEBUG_PE:
                    if len(d) > longest_complist_len:
                        longest_complist = repr(d)
                        longest_complist_len = len(d)
                    if len(d) < shortest_complist_len:
                        shortest_complist = repr(d)
                        shortest_complist_len = len(d)
                tag = (nbt.load(buf=(d + '\x00\x00\x00\x00')))
                compounds.append(tag)

        if DEBUG_PE:
            try:
                open(dump_fName, 'a').write(
                    "**********\nLongest data length: %s\nData:\n%s\n" %
                    (longest_complist_len, longest_complist))
                open(dump_fName, 'a').write(
                    "**********\nShortest data length: %s\nData:\n%s\n" %
                    (shortest_complist_len, shortest_complist))
            except Exception, e:
                print "Could not write debug info:", e
Example No. 6
    def __init__(self,
                 filename=None,
                 root_tag=None,
                 size=None,
                 mats=alphaMaterials):
        if not 'blockstateToID' in globals().keys():
            from materials import blockstateToID
        self._author = None
        self._blocks = None
        self._palette = None
        self._entities = []
        self._tile_entities = None
        self._size = None
        self._version = None
        self._mat = mats

        if filename:
            root_tag = nbt.load(filename)

        if root_tag:
            self._root_tag = root_tag
            self._size = (self._root_tag["size"][0].value,
                          self._root_tag["size"][1].value,
                          self._root_tag["size"][2].value)

            self._author = self._root_tag.get("author", nbt.TAG_String()).value
            self._version = self._root_tag.get("version", nbt.TAG_Int(1)).value
            self._version = self._root_tag.get("DataVersion",
                                               nbt.TAG_Int(1)).value

            self._palette = self.__toPythonPrimitive(self._root_tag["palette"])

            self._blocks = zeros(self.Size, dtype=tuple)
            self._blocks.fill((0, 0))
            self._entities = []
            self._tile_entities = zeros(self.Size, dtype=nbt.TAG_Compound)
            self._tile_entities.fill({})

            for block in self._root_tag["blocks"]:
                x, y, z = [p.value for p in block["pos"].value]
                self._blocks[x, y, z] = blockstateToID(
                    *self.get_state(block["state"].value))
                if "nbt" in block:
                    compound = nbt.TAG_Compound()
                    compound.update(block["nbt"])
                    self._tile_entities[x, y, z] = compound

            for e in self._root_tag["entities"]:
                entity = e["nbt"]
                entity["Pos"] = e["pos"]
                self._entities.append(entity)
        elif size:
            self._root_tag = nbt.TAG_Compound()
            self._size = size

            self._blocks = zeros(self.Size, dtype=tuple)
            self._blocks.fill((0, 0))
            self._entities = []
            self._tile_entities = zeros(self.Size, dtype=nbt.TAG_Compound)
            self._tile_entities.fill({})
Example No. 7
    def saveGeneratedChunk(self, cx, cz, tempChunkBytes):
        """
        Chunks get generated using Anvil generation. This is a (slow) way of importing anvil chunk bytes
        and converting them to MCPE chunk data. Could definitely use some improvements, but at least it works.
        :param cx, cx: Coordinates of the chunk
        :param tempChunkBytes: str. Raw MCRegion chunk data.
        :return:
        """
        loaded_data = nbt.load(buf=tempChunkBytes)

        class fake:
            def __init__(self):
                self.Height = 128

        tempChunk = AnvilChunkData(fake(), (0, 0), loaded_data)

        if not self.containsChunk(cx, cz):
            self.createChunk(cx, cz)
            chunk = self.getChunk(cx, cz)
            chunk.Blocks = numpy.array(tempChunk.Blocks, dtype='uint16')
            chunk.Data = numpy.array(tempChunk.Data, dtype='uint8')
            chunk.SkyLight = numpy.array(tempChunk.SkyLight, dtype='uint8')
            chunk.BlockLight = numpy.array(tempChunk.BlockLight, dtype='uint8')

            chunk.dirty = True
            self.worldFile.saveChunk(chunk)
        else:
            logger.info("Tried to import generated chunk at %s, %s but the chunk already existed." % cx, cz)
Example No. 8
def get_worlds():
    "Returns {world # or name : level.dat information}"
    ret = {}
    save_dir = get_save_dir()

    # No dirs found - most likely not running from inside minecraft-dir
    if save_dir is None:
        return None

    for dir in os.listdir(save_dir):
        world_dat = os.path.join(save_dir, dir, "level.dat")
        if not os.path.exists(world_dat):
            continue
        info = nbt.load(world_dat)[1]
        info["Data"]["path"] = os.path.join(save_dir, dir)
        if dir.startswith("World") and len(dir) == 6:
            try:
                world_n = int(dir[-1])
                ret[world_n] = info["Data"]
            except ValueError:
                pass
        if "LevelName" in info["Data"].keys():
            ret[info["Data"]["LevelName"]] = info["Data"]

    return ret
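
A hedged usage sketch for the function above; the printed fields follow what the loop stores, nothing else is assumed:

    # Hypothetical usage: list every world found in the saves directory.
    worlds = get_worlds()
    if worlds is None:
        print("No saves directory found")
    else:
        for key, data in worlds.items():
            # key is either a world number or the LevelName string
            print("%s -> %s" % (key, data["path"]))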
Example No. 9
    def __init__(self, shape=None, root_tag=None, filename=None, mats="Alpha"):
        """ shape is (x,y,z) for a new level's shape.  if none, takes
        root_tag as a TAG_Compound for an existing schematic file.  if
        none, tries to read the tag from filename.  if none, results
        are undefined. materials can be a MCMaterials instance, or one of
        "Classic", "Alpha", "Pocket" to indicate allowable blocks. The default
        is Alpha.

        block coordinate order in the file is y,z,x to use the same code as classic/indev levels.
        in hindsight, this was a completely arbitrary decision.

        the Entities and TileEntities are nbt.TAG_List objects containing TAG_Compounds.
        this makes it easy to copy entities without knowing about their insides.

        rotateLeft swaps the axes of the different arrays.  because of this, the Width, Height, and Length
        reflect the current dimensions of the schematic rather than the ones specified in the NBT structure.
        I'm not sure what happens when I try to re-save a rotated schematic.
        """

        # if(shape != None):
        #    self.setShape(shape)

        if filename:
            self.filename = filename
            if None is root_tag and os.path.exists(filename):
                root_tag = nbt.load(filename)
        else:
            self.filename = None

        if mats in namedMaterials:
            self.materials = namedMaterials[mats]
        else:
            assert isinstance(mats, MCMaterials)
            self.materials = mats

        if root_tag:
            self.root_tag = root_tag
            if Materials in root_tag:
                self.materials = namedMaterials[self.Materials]
            else:
                root_tag[Materials] = nbt.TAG_String(self.materials.name)
            self.shapeChunkData()

        else:
            assert shape != None
            root_tag = nbt.TAG_Compound(name="Schematic")
            root_tag[Height] = nbt.TAG_Short(shape[1])
            root_tag[Length] = nbt.TAG_Short(shape[2])
            root_tag[Width] = nbt.TAG_Short(shape[0])

            root_tag[Entities] = nbt.TAG_List()
            root_tag[TileEntities] = nbt.TAG_List()
            root_tag["Materials"] = nbt.TAG_String(self.materials.name)

            root_tag[Blocks] = nbt.TAG_Byte_Array(zeros((shape[1], shape[2], shape[0]), uint8))
            root_tag[Data] = nbt.TAG_Byte_Array(zeros((shape[1], shape[2], shape[0]), uint8))

            self.root_tag = root_tag

        self.dataIsPacked = True
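
The docstring above notes that the file stores blocks in y,z,x order. A small sketch, assuming only a numpy array shaped (Height, Length, Width) as in the constructor, of viewing the same data in x,y,z order:

    import numpy

    # Hypothetical dimensions: Height=4, Length=5, Width=6
    height, length, width = 4, 5, 6

    # Blocks as the schematic file stores them: indexed [y, z, x]
    blocks_yzx = numpy.zeros((height, length, width), dtype=numpy.uint8)

    # A view of the same data indexed [x, y, z]; transpose makes no copy.
    blocks_xyz = blocks_yzx.transpose(2, 0, 1)

    assert blocks_xyz.shape == (width, height, length)
    # blocks_xyz[x, y, z] refers to the same element as blocks_yzx[y, z, x]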
Example No. 10
    def testErrors(self):
        """
        attempt to name elements of a TAG_List
        named list elements are not allowed by the NBT spec,
        so we must discard any names when writing a list.
        """

        level = self.testCreate()
        level["Map"]["Spawn"][0].name = "Torg Potter"
        sio = StringIO()
        level.save(buf=sio)
        newlevel = nbt.load(buf=sio.getvalue())

        n = newlevel["Map"]["Spawn"][0].name
        if n:
            print "Named list element failed: %s" % n
        """
        attempt to delete non-existent TAG_Compound elements
        this generates a KeyError like a python dict does.
        """
        level = self.testCreate()
        try:
            del level["DEADBEEF"]
        except KeyError:
            pass
        else:
            assert False
Example No. 11
    def saveGeneratedChunk(self, cx, cz, tempChunkBytes):
        """
        Chunks get generated using Anvil generation. This is a (slow) way of importing anvil chunk bytes
        and converting them to MCPE chunk data. Could definitely use some improvements, but at least it works.
        :param cx, cx: Coordinates of the chunk
        :param tempChunkBytes: str. Raw MCRegion chunk data.
        :return:
        """
        loaded_data = nbt.load(buf=tempChunkBytes)

        class fake:
            def __init__(self):
                self.Height = 128

        tempChunk = AnvilChunkData(fake(), (0, 0), loaded_data)

        if not self.containsChunk(cx, cz):
            self.createChunk(cx, cz)
            chunk = self.getChunk(cx, cz)
            chunk.Blocks = numpy.array(tempChunk.Blocks, dtype='uint16')
            chunk.Data = numpy.array(tempChunk.Data, dtype='uint8')
            chunk.SkyLight = numpy.array(tempChunk.SkyLight, dtype='uint8')
            chunk.BlockLight = numpy.array(tempChunk.BlockLight, dtype='uint8')

            chunk.dirty = True
            self.worldFile.saveChunk(chunk)
        else:
            logger.info(
                "Tried to import generated chunk at %s, %s but the chunk already existed."
                % (cx, cz))
Example No. 12
    def testErrors(self):
        """
        attempt to name elements of a TAG_List
        named list elements are not allowed by the NBT spec,
        so we must discard any names when writing a list.
        """

        level = self.testCreate()
        level["Map"]["Spawn"][0].name = "Torg Potter"
        data = level.save()
        newlevel = nbt.load(buf=data)

        n = newlevel["Map"]["Spawn"][0].name
        if n:
            print "Named list element failed: %s" % n

        # attempt to delete non-existent TAG_Compound elements
        # this generates a KeyError like a python dict does.
        level = self.testCreate()
        try:
            del level["DEADBEEF"]
        except KeyError:
            pass
        else:
            assert False
Example No. 13
    def __init__(self, root_tag, filename):

        self.Blocks = array([[[alphaMaterials.Chest.ID]]], 'uint8')

        if filename:
            self.filename = filename
            if None is root_tag:
                try:
                    root_tag = nbt.load(filename)
                except IOError as e:
                    log.info(u"Failed to load file {0}".format(e))
                    raise
        else:
            assert root_tag, "Must have either root_tag or filename"
            self.filename = None

        for item in list(root_tag["Inventory"]):
            slot = item["Slot"].value
            if slot < 9 or slot >= 36:
                root_tag["Inventory"].remove(item)
            else:
                item["Slot"].value -= 9  # adjust for different chest slot indexes

        self.root_tag = root_tag
Example No. 14
    def __init__(self, playerNBTFile):
        self.nbtFile = playerNBTFile
        self.nbtFileName = playerNBTFile.split("\\")[-1]
        self.root_tag = nbt.load(playerNBTFile)

        # Properties setup
        self._uuid = self.nbtFileName.split(".")[0]
        playerName = version_utils.getPlayerNameFromUUID(self._uuid)
        if playerName != self._uuid:
            self._name = playerName
        else:
            self._name = None
        self._gametype = self.root_tag["playerGameType"].value

        self._pos = [
            self.root_tag["Pos"][0].value, self.root_tag["Pos"][1].value,
            self.root_tag["Pos"][2].value
        ]
        self._rot = [
            self.root_tag["Rotation"][0].value,
            self.root_tag["Rotation"][1].value
        ]

        self._health = self.root_tag["Health"].value
        self._healf = self.root_tag["HealF"].value

        self._xp_level = self.root_tag["XpLevel"].value
Example No. 15
    def testAnvilChunk(self):
        """ Test modifying, saving, and loading the new TAG_Int_Array heightmap
        added with the Anvil format.
        """
        chunk = nbt.load("testfiles/AnvilChunk.dat")

        hm = chunk["Level"]["HeightMap"]
        hm.value[2] = 500
        oldhm = numpy.array(hm.value)

        filename = mktemp("ChangedChunk")
        chunk.save(filename)
        changedChunk = nbt.load(filename)
        os.unlink(filename)

        eq = (changedChunk["Level"]["HeightMap"].value == oldhm)
        assert eq.all()
Example No. 16
 def testSpeed(self):
     d = join("testfiles", "TileTicks_chunks")
     files = [join(d, f) for f in os.listdir(d)]
     startTime = time.time()
     for i in range(20):
         for f in files[:40]:
             n = nbt.load(f)
     print "Duration: ", time.time() - startTime
Example No. 17
    def testBigEndianIntHeightMap(self):
        """ Test modifying, saving, and loading the new TAG_Int_Array heightmap
        added with the Anvil format.
        """
        chunk = nbt.load("testfiles/AnvilChunk.dat")

        hm = chunk["Level"]["HeightMap"]
        hm.value[2] = 500
        oldhm = numpy.array(hm.value)

        filename = mktemp("ChangedChunk")
        chunk.save(filename)
        changedChunk = nbt.load(filename)
        os.unlink(filename)

        eq = (changedChunk["Level"]["HeightMap"].value == oldhm)
        assert eq.all()
Example No. 18
 def testSpeed(self):
     d = join("testfiles", "TileTicks_chunks")
     files = [join(d, f) for f in os.listdir(d)]
     startTime = time.time()
     for i in range(20):
         for f in files[:40]:
             n = nbt.load(f)
     print "Duration: ", time.time() - startTime
Example No. 19
    def repair(self):
        lostAndFound = {}
        _freeSectors = [True] * len(self.freeSectors)
        _freeSectors[0] = _freeSectors[1] = False
        deleted = 0
        recovered = 0
        log.info("Beginning repairs on {file} ({chunks} chunks)".format(file=os.path.basename(self.path), chunks=sum(self.offsets > 0)))
        rx, rz = self.regionCoords
        for index, offset in enumerate(self.offsets):
            if offset:
                cx = index & 0x1f
                cz = index >> 5
                cx += rx << 5
                cz += rz << 5
                sectorStart = offset >> 8
                sectorCount = offset & 0xff
                try:

                    if sectorStart + sectorCount > len(self.freeSectors):
                        raise RegionMalformed("Offset {start}:{end} ({offset}) at index {index} pointed outside of the file".format(
                            start=sectorStart, end=sectorStart + sectorCount, index=index, offset=offset))

                    data = self.readChunk(cx, cz)
                    if data is None:
                        raise RegionMalformed("Failed to read chunk data for {0}".format((cx, cz)))

                    chunkTag = nbt.load(buf=data)
                    lev = chunkTag["Level"]
                    xPos = lev["xPos"].value
                    zPos = lev["zPos"].value
                    overlaps = False

                    for i in range(sectorStart, sectorStart + sectorCount):
                        if _freeSectors[i] is False:
                            overlaps = True
                        _freeSectors[i] = False

                    if xPos != cx or zPos != cz or overlaps:
                        lostAndFound[xPos, zPos] = data

                        if (xPos, zPos) != (cx, cz):
                            raise RegionMalformed("Chunk {found} was found in the slot reserved for {expected}".format(found=(xPos, zPos), expected=(cx, cz)))
                        else:
                            raise RegionMalformed("Chunk {found} (in slot {expected}) has overlapping sectors with another chunk!".format(found=(xPos, zPos), expected=(cx, cz)))

                except Exception as e:
                    log.info("Unexpected chunk data at sector {sector} ({exc})".format(sector=sectorStart, exc=e))
                    self.setOffset(cx, cz, 0)
                    deleted += 1

        for cPos, foundData in lostAndFound.items():
            cx, cz = cPos
            if self.getOffset(cx, cz) == 0:
                log.info("Found chunk {found} and its slot is empty, recovering it".format(found=cPos))
                self.saveChunk(cx, cz, foundData)
                recovered += 1

        log.info("Repair complete. Removed {0} chunks, recovered {1} chunks, net {2}".format(deleted, recovered, recovered - deleted))
Example No. 20
    def testSpeed(self):
        d = join("testfiles", "TileTicks_chunks")
        files = [join(d, f) for f in os.listdir(d)]
        startTime = time.time()
        for f in files[:40]:
            n = nbt.load(f)
        duration = time.time() - startTime

        assert duration < 1.0  # Will fail when not using _nbt.pyx
Example No. 21
 def _loadLevelDat(filename):
     root_tag_buf = open(filename, 'rb').read()
     magic, length, root_tag_buf = root_tag_buf[:4], root_tag_buf[4:8], root_tag_buf[8:]
     if struct.Struct('<i').unpack(magic)[0] < 3:
         logger.info("Found an old level.dat file. Aborting world load")
         raise InvalidPocketLevelDBWorldException()  # Maybe try convert/load old PE world?
     if len(root_tag_buf) != struct.Struct('<i').unpack(length)[0]:
         raise nbt.NBTFormatError()
     self.root_tag = nbt.load(buf=root_tag_buf)
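
The PE level.dat parsed above carries an 8-byte header before the NBT payload: a little-endian int with the storage version, then a little-endian int with the payload length. A small sketch that splits the file the same way the checks above do:

    import struct

    def split_pe_leveldat(raw):
        # First 4 bytes: storage version; next 4 bytes: length of the NBT payload.
        version = struct.unpack('<i', raw[:4])[0]
        length = struct.unpack('<i', raw[4:8])[0]
        payload = raw[8:]
        if len(payload) != length:
            raise ValueError("level.dat payload length mismatch")
        return version, payload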
Example No. 22
    def testSpeed(self):
        d = join("testfiles", "TileTicks_chunks")
        files = [join(d, f) for f in os.listdir(d)]
        startTime = time.time()
        for f in files[:40]:
            n = nbt.load(f)
        duration = time.time() - startTime

        assert duration < 1.0 # Will fail when not using _nbt.pyx
Example No. 23
    def find_true_spawn(self):
        """Adds the true spawn location to self.POI.  The spawn Y coordinate
        is almost always the default of 64.  Find the first air block above
        that point for the true spawn location"""

        ## read spawn info from level.dat
        data = nbt.load(os.path.join(self.worlddir, "level.dat"))[1]
        disp_spawnX = spawnX = data['Data']['SpawnX']
        spawnY = data['Data']['SpawnY']
        disp_spawnZ = spawnZ = data['Data']['SpawnZ']
        if self.north_direction == 'upper-left':
            temp = spawnX
            spawnX = -spawnZ
            spawnZ = temp
        elif self.north_direction == 'upper-right':
            spawnX = -spawnX
            spawnZ = -spawnZ
        elif self.north_direction == 'lower-right':
            temp = spawnX
            spawnX = spawnZ
            spawnZ = -temp
   
        ## The chunk that holds the spawn location 
        chunkX = spawnX/16
        chunkY = spawnZ/16
        
        ## clamp spawnY to a sane, in-chunk value
        if spawnY < 0:
            spawnY = 0
        if spawnY > 127:
            spawnY = 127
        
        try:
            ## The filename of this chunk
            chunkFile = self.get_region_path(chunkX, chunkY)
            if chunkFile is not None:
                data = nbt.load_from_region(chunkFile, chunkX, chunkY, self.north_direction)
                if data is not None:
                    level = data[1]['Level']
                    blockArray = numpy.frombuffer(level['Blocks'], dtype=numpy.uint8).reshape((16,16,128))
                
                    ## The block for spawn *within* the chunk
                    inChunkX = spawnX - (chunkX*16)
                    inChunkZ = spawnZ - (chunkY*16)
                
                    ## find the first air block
                    while (blockArray[inChunkX, inChunkZ, spawnY] != 0):
                        spawnY += 1
                        if spawnY == 128:
                            break
        except chunk.ChunkCorrupt:
            #ignore corrupt spawn, and continue
            pass
        self.POI.append( dict(x=disp_spawnX, y=spawnY, z=disp_spawnZ,
                msg="Spawn", type="spawn", chunk=(chunkX, chunkY)))
        self.spawn = (disp_spawnX, spawnY, disp_spawnZ)
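
The north_direction branches above rotate the spawn coordinates so the rendered map can face a different compass direction. The same mapping as a hedged standalone helper (name hypothetical; any other direction value leaves the coordinates unchanged, as in the code above):

    def rotate_coords(x, z, north_direction):
        # Mirrors the branches in find_true_spawn above.
        if north_direction == 'upper-left':
            return -z, x
        elif north_direction == 'upper-right':
            return -x, -z
        elif north_direction == 'lower-right':
            return z, -x
        return x, z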
Example No. 24
    def addSpawn(self):
        """Adds the true spawn location to self.POI."""

        ## read spawn info from level.dat
        data = nbt.load(os.path.join(self.worlddir, "level.dat"))[1]
        spawnX = data["Data"]["SpawnX"]
        spawnY = data["Data"]["SpawnY"]
        spawnZ = data["Data"]["SpawnZ"]

        self.POI.append(dict(x=spawnX, y=spawnY, z=spawnZ, msg="Spawn", id=0))
Example No. 25
    def __init__(self, root_tag, filename):

        if filename:
            self.filename = filename
            if None is root_tag:
                try:
                    root_tag = nbt.load(filename)
                except IOError, e:
                    log.info(u"Failed to load file {0}".format(e))
                    raise
Example No. 26
    def repair(self):
        lostAndFound = {}
        _freeSectors = [True] * len(self.freeSectors)
        _freeSectors[0] = _freeSectors[1] = False
        deleted = 0
        recovered = 0
        log.info("Beginning repairs on {file} ({chunks} chunks)".format(file=os.path.basename(self.path),
                                                                        chunks=sum(self.offsets > 0)))
        rx, rz = self.regionCoords
        for index, offset in enumerate(self.offsets):
            if offset:
                cx = index & 0x1f
                cz = index >> 5
                cx += rx << 5
                cz += rz << 5
                sectorStart = offset >> 8
                sectorCount = offset & 0xff
                try:

                    if sectorStart + sectorCount > len(self.freeSectors):
                        raise RegionMalformed(
                            "Offset {start}:{end} ({offset}) at index {index} pointed outside of the file".format(
                                start=sectorStart, end=sectorStart + sectorCount, index=index, offset=offset))

                    data = self.readChunk(cx, cz)
                    if data is None:
                        raise RegionMalformed("Failed to read chunk data for {0}".format((cx, cz)))

                    chunkTag = nbt.load(buf=data)
                    lev = chunkTag["Level"]
                    xPos = lev["xPos"].value
                    zPos = lev["zPos"].value
                    overlaps = False

                    for i in xrange(sectorStart, sectorStart + sectorCount):
                        if _freeSectors[i] is False:
                            overlaps = True
                        _freeSectors[i] = False

                    if xPos != cx or zPos != cz or overlaps:
                        lostAndFound[xPos, zPos] = data

                        if (xPos, zPos) != (cx, cz):
                            raise RegionMalformed(
                                "Chunk {found} was found in the slot reserved for {expected}".format(found=(xPos, zPos),
                                                                                                     expected=(cx, cz)))
                        else:
                            raise RegionMalformed(
                                "Chunk {found} (in slot {expected}) has overlapping sectors with another chunk!".format(
                                    found=(xPos, zPos), expected=(cx, cz)))

                except Exception, e:
                    log.info("Unexpected chunk data at sector {sector} ({exc})".format(sector=sectorStart, exc=e))
                    self.setOffset(cx, cz, 0)
                    deleted += 1
Example No. 27
 def load(_data):
     sep = "\x00\x00\x00\x00\n"
     sep_data = _data.split(sep)
     compounds = []
     for d in sep_data:
         if len(d) != 0:
             if not d.startswith("\n"):
                 d = "\n" + d
             tag = (nbt.load(buf=(d + '\x00\x00\x00\x00')))
             compounds.append(tag)
     return compounds
Example No. 28
 def _loadLevelDat(filename):
     root_tag_buf = open(filename, 'rb').read()
     magic, length, root_tag_buf = root_tag_buf[:4], root_tag_buf[
         4:8], root_tag_buf[8:]
     if struct.Struct('<i').unpack(magic)[0] < 3:
         logger.info("Found an old level.dat file. Aborting world load")
         raise InvalidPocketLevelDBWorldException(
         )  # Maybe try convert/load old PE world?
     if len(root_tag_buf) != struct.Struct('<i').unpack(length)[0]:
         raise nbt.NBTFormatError()
     self.root_tag = nbt.load(buf=root_tag_buf)
Example No. 29
 def load(_data):
     sep = "\x00\x00\x00\x00\n"
     sep_data = _data.split(sep)
     compounds = []
     for d in sep_data:
         if len(d) != 0:
             if not d.startswith("\n"):
                 d = "\n" + d
             tag = (nbt.load(buf=(d + '\x00\x00\x00\x00')))
             compounds.append(tag)
     return compounds
Example No. 30
    def setup(self):
        if not self._scs:
            self.root_tag = nbt.load(self.level.worldFolder.getFolderPath("data")+"/scoreboard.dat")
            for objective in self.root_tag["data"]["Objectives"]:
                self.objectives.append(Objective(objective))

            for team in self.root_tag["data"]["Teams"]:
                self.teams.append(Team(team))
        else:
            self.root_tag = nbt.TAG_Compound()
            self.root_tag["data"] = nbt.TAG_Compound()
Example No. 31
    def __init__(self, root_tag, filename):

        self.Blocks = array([[[alphaMaterials.Chest.ID]]], 'uint8')

        if filename:
            self.filename = filename
            if None is root_tag:
                try:
                    root_tag = nbt.load(filename)
                except IOError, e:
                    log.info(u"Failed to load file {0}".format(e))
                    raise
Example No. 32
 def findPlayerPosition(self):
     """Load player positions from players folder for all players who haven't been inactive for over a week"""
     playerfolder = os.path.join(self.worlddir, "players")
     playerList = os.listdir(playerfolder)
     now = time.time()
     for playerFile in playerList:
         absPlayerFile = os.path.join(playerfolder, playerFile)
         if os.path.getmtime(absPlayerFile)+60*60*24*7 < now:
             continue
         data = nbt.load(absPlayerFile)[1]
         pos = data['Pos']
         self.POI.append( dict(x=pos[0], y=pos[1], z=pos[2], msg=playerFile[0:-4]))
Example No. 33
    def testLoad(self):
        "Load an indev level."
        level = nbt.load("testfiles/hell.mclevel")
        """The root tag must have a name, and so must any tag within a TAG_Compound"""
        print level.name
        """Use the [] operator to look up subtags of a TAG_Compound."""
        print level["Environment"]["SurroundingGroundHeight"].value
        """Numeric, string, and bytearray types have a value
  that can be accessed and changed. """
        print level["Map"]["Blocks"].value

        return level
Example No. 34
    def setup(self):
        if not self._scs:
            self.root_tag = nbt.load(
                self.level.worldFolder.getFolderPath("data") +
                "/scoreboard.dat")
            for objective in self.root_tag["data"]["Objectives"]:
                self.objectives.append(Objective(objective))

            for team in self.root_tag["data"]["Teams"]:
                self.teams.append(Team(team))
        else:
            self.root_tag = nbt.TAG_Compound()
            self.root_tag["data"] = nbt.TAG_Compound()
Example No. 35
    def __init__(self, worlddir, cachedir, chunklist=None, lighting=False, night=False, spawn=False, useBiomeData=False):
        self.worlddir = worlddir
        self.caves = False
        self.lighting = lighting or night or spawn
        self.night = night or spawn
        self.spawn = spawn
        self.cachedir = cachedir
        self.useBiomeData = useBiomeData

        # figure out which chunk format is in use
        # if not mcregion, error out early since only McRegion is supported
        data = nbt.load(os.path.join(self.worlddir, "level.dat"))[1]['Data']
        #print data
        if not ('version' in data and data['version'] == 19132):
            logging.error("Sorry, This version of Minecraft-Overviewer only works with the new McRegion chunk format")
            sys.exit(1)

        if self.useBiomeData:
            textures.prepareBiomeData(worlddir)

        self.chunklist = chunklist

        # In order to avoid having to look up the cache file names in
        # ChunkRenderer, get them all and store them here
        # TODO change how caching works
        for root, dirnames, filenames in os.walk(cachedir):
            for filename in filenames:
                if not filename.endswith('.png') or not filename.startswith("img."):
                    continue
                dirname, dir_b = os.path.split(root)
                _, dir_a = os.path.split(dirname)
                _, x, z, cave, _ = filename.split('.', 4)
                dir = '/'.join((dir_a, dir_b))
                bits = '.'.join((x, z, cave))
                cached[dir][bits] = os.path.join(root, filename)

        #  stores Points Of Interest to be mapped with markers
        #  a list of dictionaries, see below for an example
        self.POI = []

        # if it exists, open overviewer.dat, and read in the data structure
        # into self.persistentData.  This dictionary can hold any information
        # that may be needed between runs.
        # Currently only holds info about POIs (for more details, see quadtree)
        self.pickleFile = os.path.join(self.cachedir,"overviewer.dat")
        if os.path.exists(self.pickleFile):
            with open(self.pickleFile,"rb") as p:
                self.persistentData = cPickle.load(p)
        else:
            # some defaults
            self.persistentData = dict(POI=[])
Example No. 36
    def __init__(self, worlddir, useBiomeData=False,regionlist=None):
        self.worlddir = worlddir
        self.useBiomeData = useBiomeData
                
        #find region files, or load the region list
        #this also caches all the region file header info
        logging.info("Scanning regions")
        regionfiles = {}
        self.regions = {}
        if regionlist:
            self.regionlist = map(os.path.abspath, regionlist) # a list of paths
        else:
            self.regionlist = None
        for x, y, regionfile in self._iterate_regionfiles():
            mcr = self.reload_region(regionfile) 
            mcr.get_chunk_info()
            regionfiles[(x, y)] = (x, y, regionfile, mcr)
        self.regionfiles = regionfiles
        # set the number of region file handles we will permit open at any time before we start closing them
#        self.regionlimit = 1000
        # the max number of chunks we will keep before removing them (includes empty chunks)
        self.chunklimit = 1024 
        self.chunkcount = 0
        self.empty_chunk = [None,None]
        logging.debug("Done scanning regions")
        
        # figure out which chunk format is in use
        # if not mcregion, error out early
        data = nbt.load(os.path.join(self.worlddir, "level.dat"))[1]['Data']
        #print data
        if not ('version' in data and data['version'] == 19132):
            logging.error("Sorry, This version of Minecraft-Overviewer only works with the new McRegion chunk format")
            sys.exit(1)

        #  stores Points Of Interest to be mapped with markers
        #  a list of dictionaries, see below for an example
        self.POI = []

        # if it exists, open overviewer.dat, and read in the data structure
        # into self.persistentData.  This dictionary can hold any information
        # that may be needed between runs.
        # Currently only holds info about POIs (for more details, see quadtree)
        # TODO maybe store this with the tiles, not with the world?
        self.pickleFile = os.path.join(self.worlddir, "overviewer.dat")
        if os.path.exists(self.pickleFile):
            with open(self.pickleFile,"rb") as p:
                self.persistentData = cPickle.load(p)
        else:
            # some defaults
            self.persistentData = dict(POI=[])
Example No. 37
 def __init__(self, filename=None, root_tag=None, size=None, mats=alphaMaterials):
     self._author = None
     self._blocks = None
     self._palette = None
     self._entities = []
     self._tile_entities = None
     self._size = None
     self._version = None
     self._mat = mats
     
     if filename:
         root_tag = nbt.load(filename)
     
     if root_tag:
         self._root_tag = root_tag
         self._size = (self._root_tag["size"][0].value, self._root_tag["size"][1].value, self._root_tag["size"][2].value)
             
         self._author = self._root_tag.get("author", nbt.TAG_String()).value
         self._version = self._root_tag.get("version", nbt.TAG_Int(1)).value
         self._version = self._root_tag.get("DataVersion", nbt.TAG_Int(1)).value
             
         self._palette = self.__toPythonPrimitive(self._root_tag["palette"])
         
         self._blocks = zeros(self.Size, dtype=tuple)
         self._blocks.fill((0, 0))
         self._entities = []
         self._tile_entities = zeros(self.Size, dtype=nbt.TAG_Compound)
         self._tile_entities.fill({})
         
         for block in self._root_tag["blocks"]:
             x, y, z = [ p.value for p in block["pos"].value ]
             self._blocks[x, y, z] = blockstateToID(*self.get_state(block["state"].value))
             if "nbt" in block:
                 compound = nbt.TAG_Compound()
                 compound.update(block["nbt"])
                 self._tile_entities[x, y, z] = compound
                 
         for e in self._root_tag["entities"]:
             entity = e["nbt"]
             entity["Pos"] = e["pos"]
             self._entities.append(entity)
     elif size:
         self._root_tag = nbt.TAG_Compound()
         self._size = size
         
         self._blocks = zeros(self.Size, dtype=tuple)
         self._blocks.fill((0, 0))
         self._entities = []
         self._tile_entities = zeros(self.Size, dtype=nbt.TAG_Compound)
         self._tile_entities.fill({})
Example No. 38
    def testLoad(self):
        "Load an indev level."
        level = nbt.load("testfiles/hell.mclevel")

        # The root tag must have a name, and so must any tag within a TAG_Compound
        print level.name

        # Use the [] operator to look up subtags of a TAG_Compound.
        print level["Environment"]["SurroundingGroundHeight"].value

        # Numeric, string, and bytearray types have a value that can be accessed and changed.
        print level["Map"]["Blocks"].value

        return level
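
Building on the accessors shown above, a hedged round-trip sketch (assuming the same test file and the buffer-returning save() used in the other test examples on this page):

    level = nbt.load("testfiles/hell.mclevel")
    level["Environment"]["SurroundingGroundHeight"].value = 32

    data = level.save()              # serialize to a byte string
    reloaded = nbt.load(buf=data)    # and parse it straight back

    assert reloaded["Environment"]["SurroundingGroundHeight"].value == 32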
Example No. 39
    def findTrueSpawn(self):
        """Adds the true spawn location to self.POI.  The spawn Y coordinate
        is almost always the default of 64.  Find the first air block above
        that point for the true spawn location"""

        ## read spawn info from level.dat
        data = nbt.load(os.path.join(self.worlddir, "level.dat"))[1]
        disp_spawnX = spawnX = data["Data"]["SpawnX"]
        spawnY = data["Data"]["SpawnY"]
        disp_spawnZ = spawnZ = data["Data"]["SpawnZ"]
        if self.north_direction == "upper-left":
            temp = spawnX
            spawnX = -spawnZ
            spawnZ = temp
        elif self.north_direction == "upper-right":
            spawnX = -spawnX
            spawnZ = -spawnZ
        elif self.north_direction == "lower-right":
            temp = spawnX
            spawnX = spawnZ
            spawnZ = -temp

        ## The chunk that holds the spawn location
        chunkX = spawnX / 16
        chunkY = spawnZ / 16

        try:
            ## The filename of this chunk
            chunkFile = self.get_region_path(chunkX, chunkY)
            if chunkFile is not None:
                data = nbt.load_from_region(chunkFile, chunkX, chunkY, self.north_direction)[1]
                if data is not None:
                    level = data["Level"]
                    blockArray = numpy.frombuffer(level["Blocks"], dtype=numpy.uint8).reshape((16, 16, 128))

                    ## The block for spawn *within* the chunk
                    inChunkX = spawnX - (chunkX * 16)
                    inChunkZ = spawnZ - (chunkY * 16)

                    ## find the first air block
                    while blockArray[inChunkX, inChunkZ, spawnY] != 0:
                        spawnY += 1
                        if spawnY == 128:
                            break
        except ChunkCorrupt:
            # ignore corrupt spawn, and continue
            pass
        self.POI.append(dict(x=disp_spawnX, y=spawnY, z=disp_spawnZ, msg="Spawn", type="spawn", chunk=(chunkX, chunkY)))
        self.spawn = (disp_spawnX, spawnY, disp_spawnZ)
Example No. 40
def get_worlds():
    "Returns {world # : level.dat information}"
    ret = {}
    save_dir = get_save_dir()

    # No dirs found - most likely not running from inside minecraft-dir
    if save_dir is None:
        return None

    for dir in os.listdir(save_dir):
        if dir.startswith("World") and len(dir) == 6:
            world_n = int(dir[-1])
            info = nbt.load(os.path.join(save_dir, dir, "level.dat"))[1]
            info['Data']['path'] = os.path.join(save_dir, dir)
            ret[world_n] = info['Data']

    return ret
Example No. 41
 def getPlayerTag(self, player='Player'):
     """
     Obtains a player from the world.
     :param player: string of the name of the player. "Player" for SSP player, player_<client-id> for SMP player.
     :return: nbt.TAG_Compound, root tag of the player.
     """
     if player == '[No players]':  # Apparently this is being called somewhere?
         return None
     if player == 'Player':
         player = '~local_player'
     _player = self.playerTagCache.get(player)
     if _player is not None:
         return _player
     playerData = self.playerData[player]
     with nbt.littleEndianNBT():
         _player = nbt.load(buf=playerData)
         self.playerTagCache[player] = _player
     return _player
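
A hedged usage sketch for the accessor above; the Pos list and its .value fields follow the player tags shown elsewhere on this page, and world stands for the already-loaded PE level object:

    player_tag = world.getPlayerTag('Player')
    if player_tag is not None:
        x, y, z = [p.value for p in player_tag["Pos"]]
        print("Local player is at %.1f, %.1f, %.1f" % (x, y, z))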
Example No. 42
def identify(directory):
    if not (os.path.exists(os.path.join(directory, 'region'))
            or os.path.exists(os.path.join(directory, 'playerdata'))):
        return False
    if not (os.path.exists(os.path.join(directory, 'DIM1'))
            or os.path.exists(os.path.join(directory, 'DIM-1'))):
        return False
    if not (os.path.exists(os.path.join(directory, 'data'))
            or os.path.exists(os.path.join(directory, 'level.dat'))):
        return False
    root = nbt.load(os.path.join(directory, 'level.dat'))
    if 'FML' in root:
        return False
    if root.get('Data',
                nbt.TAG_Compound()).get('Version', nbt.TAG_Compound()).get(
                    'Id', nbt.TAG_Int(-1)).value < 1451:
        return False
    return True
Example No. 43
 def getPlayerTag(self, player='Player'):
     """
     Obtains a player from the world.
     :param player: string of the name of the player. "Player" for SSP player, player_<client-id> for SMP player.
     :return: nbt.TAG_Compound, root tag of the player.
     """
     if player == '[No players]':  # Apparently this is being called somewhere?
         return None
     if player == 'Player':
         player = '~local_player'
     _player = self.playerTagCache.get(player)
     if _player is not None:
         return _player
     playerData = self.playerData[player]
     with nbt.littleEndianNBT():
         _player = nbt.load(buf=playerData)
         self.playerTagCache[player] = _player
     return _player
Example No. 44
    def _getChunkFromFile(self, cx, cz):

        fp = open(self._path, 'rb+')

        cx &= 0x1f
        cz &= 0x1f

        chunk_offset = self.getOffset(cx, cz)
        if chunk_offset == 0:
            #print('Chunk does not exist')
            return

        sector_start = chunk_offset >> 8
        sector_nums = chunk_offset & 0xff

        if sector_nums == 0:
            #print('Chunk does not exist')
            return

        if sector_start + sector_nums > len(self._free_sectors):
            #print('Chunk does not exist')
            return

        fp.seek(sector_start * SECTOR_BYTES)
        data = fp.read(sector_nums * SECTOR_BYTES)

        if len(data) < 5:
            print('Chunk/Sector is malformed')
            return

        length = struct.unpack_from('>I', data)[0]
        _format = struct.unpack_from('B', data, 4)[0]
        data = data[5:length + 5]

        readable_data = None
        if _format == VERSION_GZIP:
            readable_data = nbt.gunzip(data)
            # print 'Chunk is in GZIP format'
        if _format == VERSION_DEFLATE:
            # print 'Chunk is in DEFLATE format'
            readable_data = zlib.decompress(data)

        fp.close()
        return BlockstateChunk(self.world, self, nbt.load(buf=readable_data))
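
The reader above depends on the per-chunk payload layout inside a region file: a 4-byte big-endian length, a 1-byte compression marker, then the compressed NBT. A minimal sketch of just that split, mirroring the slices used above (constant values assumed to match the module's):

    import struct, zlib

    VERSION_GZIP = 1
    VERSION_DEFLATE = 2

    def split_chunk_payload(data):
        # 4-byte big-endian length, then a 1-byte compression format marker.
        length = struct.unpack_from('>I', data)[0]
        fmt = struct.unpack_from('B', data, 4)[0]
        payload = data[5:length + 5]
        if fmt == VERSION_DEFLATE:
            return zlib.decompress(payload)
        return payload  # GZip payloads go through nbt.gunzip in the code above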
Example No. 45
    def __init__(self, playerNBTFile):
        self.nbtFile = playerNBTFile
        self.nbtFileName = playerNBTFile.split("\\")[-1]
        self.root_tag = nbt.load(playerNBTFile)

        # Properties setup
        self._uuid = self.nbtFileName.split(".")[0]
        playerName = version_utils.getPlayerNameFromUUID(self._uuid)
        if playerName != self._uuid:
            self._name = playerName
        else:
            self._name = None
        self._gametype = self.root_tag["playerGameType"].value
        
        self._pos = [self.root_tag["Pos"][0].value, self.root_tag["Pos"][1].value, self.root_tag["Pos"][2].value]
        self._rot = [self.root_tag["Rotation"][0].value, self.root_tag["Rotation"][1].value]

        self._health = self.root_tag["Health"].value
        self._healf = self.root_tag["HealF"].value

        self._xp_level = self.root_tag["XpLevel"].value
Example No. 46
    def findTrueSpawn(self):
        """Adds the true spawn location to self.POI.  The spawn Y coordinate
        is almost always the default of 64.  Find the first air block above
        that point for the true spawn location"""

        ## read spawn info from level.dat
        data = nbt.load(os.path.join(self.worlddir, "level.dat"))[1]
        spawnX = data['Data']['SpawnX']
        spawnY = data['Data']['SpawnY']
        spawnZ = data['Data']['SpawnZ']

        ## The chunk that holds the spawn location
        chunkX = spawnX / 16
        chunkY = spawnZ / 16

        ## The filename of this chunk
        chunkFile = self.get_region_path(chunkX, chunkY)

        data = nbt.load_from_region(chunkFile, chunkX, chunkY)[1]
        level = data['Level']
        blockArray = numpy.frombuffer(level['Blocks'],
                                      dtype=numpy.uint8).reshape((16, 16, 128))

        ## The block for spawn *within* the chunk
        inChunkX = spawnX - (chunkX * 16)
        inChunkZ = spawnZ - (chunkY * 16)

        ## find the first air block
        while (blockArray[inChunkX, inChunkZ, spawnY] != 0):
            spawnY += 1
            if spawnY == 128:
                break

        self.POI.append(
            dict(x=spawnX,
                 y=spawnY,
                 z=spawnZ,
                 msg="Spawn",
                 type="spawn",
                 chunk=(inChunkX, inChunkZ)))
Example No. 47
    def __init__(self, root_tag, filename):

        if filename:
            self.filename = filename
            if None is root_tag:
                try:
                    root_tag = nbt.load(filename)
                except IOError as e:
                    log.info("Failed to load file {0}".format(e))
                    raise
        else:
            assert root_tag, "Must have either root_tag or filename"
            self.filename = None

        for item in list(root_tag["Inventory"]):
            slot = item["Slot"].value
            if slot < 9 or slot >= 36:
                root_tag["Inventory"].remove(item)
            else:
                item["Slot"].value -= 9  # adjust for different chest slot indexes

        self.root_tag = root_tag
Example No. 48
 def get_block_data(args):
     x, y, z = args[:3]
     b = args[3]
     nbt_data = None
     if len(args) == 5 and args[4] != None:
         f_name = os.path.join(os.path.dirname(filename), os.path.normpath(args[4]))
         if os.path.exists(f_name):
             nbt_data = nbt.load(f_name)
         else:
             print 'Could not find %s'%args[4]
             print '  Canonical path: %s'%f_name
     x = int(x) + self.delta_x
     y = int(y) + self.delta_y
     z = int(z) + self.delta_z
     if b != None:
         b_id, b_state = (b + ':0').split(':')[:2]
     else:
         b_id, b_state = '', None
     if b_state:
         b_state = int(b_state)
     else:
         b_state = 0
     return x, y, z, b_id, b_state, nbt_data
Example No. 49
    def isLevel(cls, filename):
        """Tries to find out whether the given filename can be loaded
        by this class.  Returns True or False.

        Subclasses should implement _isLevel, _isDataLevel, or _isTagLevel.
        """
        if hasattr(cls, "_isLevel"):
            return cls._isLevel(filename)

        with file(filename) as f:
            data = f.read()

        if hasattr(cls, "_isDataLevel"):
            return cls._isDataLevel(data)

        if hasattr(cls, "_isTagLevel"):
            try:
                root_tag = nbt.load(filename, data)
            except:
                return False

            return cls._isTagLevel(root_tag)

        return False
Example No. 50
 def get_block_data(args):
     x, y, z = args[:3]
     b = args[3]
     nbt_data = None
     if len(args) == 5 and args[4] != None:
         f_name = os.path.join(os.path.dirname(filename),
                               os.path.normpath(args[4]))
         if os.path.exists(f_name):
             nbt_data = nbt.load(f_name)
         else:
             print 'Could not find %s' % args[4]
             print '  Canonical path: %s' % f_name
     x = int(x) + self.delta_x
     y = int(y) + self.delta_y
     z = int(z) + self.delta_z
     if b != None:
         b_id, b_state = (b + ':0').split(':')[:2]
     else:
         b_id, b_state = '', None
     if b_state:
         b_state = int(b_state)
     else:
         b_state = 0
     return x, y, z, b_id, b_state, nbt_data
Example No. 51
def get_worlds():
    "Returns {world # or name : level.dat information}"
    ret = {}
    save_dir = get_save_dir()

    # No dirs found - most likely not running from inside minecraft-dir
    if save_dir is None:
        return None

    for dir in os.listdir(save_dir):
        world_dat = os.path.join(save_dir, dir, "level.dat")
        if not os.path.exists(world_dat): continue
        info = nbt.load(world_dat)[1]
        info['Data']['path'] = os.path.join(save_dir, dir)
        if dir.startswith("World") and len(dir) == 6:
            try:
                world_n = int(dir[-1])
                ret[world_n] = info['Data']
            except ValueError:
                pass
        if 'LevelName' in info['Data'].keys():
            ret[info['Data']['LevelName']] = info['Data']

    return ret
Example No. 52
    def isLevel(cls, filename):
        """Tries to find out whether the given filename can be loaded
        by this class.  Returns True or False.

        Subclasses should implement _isLevel, _isDataLevel, or _isTagLevel.
        """
        if hasattr(cls, "_isLevel"):
            return cls._isLevel(filename)

        with file(filename) as f:
            data = f.read()

        if hasattr(cls, "_isDataLevel"):
            return cls._isDataLevel(data)

        if hasattr(cls, "_isTagLevel"):
            try:
                root_tag = nbt.load(filename, data)
            except:
                return False

            return cls._isTagLevel(root_tag)

        return False
Example No. 53
            u"Exception during Gzip operation, assuming {0} uncompressed: {1!r}"
            .format(filename, e))
        if unzippedData is None:
            compressed = False
            unzippedData = rawdata

    #data =
    data = unzippedData
    if MCJavaLevel._isDataLevel(data):
        log.info(u"Detected compressed Java-style level")
        lev = MCJavaLevel(filename, data)
        lev.compressed = compressed
        return lev

    try:
        root_tag = nbt.load(buf=data)

    except Exception, e:
        log.info(u"Error during NBT load: {0!r}".format(e))
        log.info(traceback.format_exc())
        log.info(
            u"Fallback: Detected compressed flat block array, yzx ordered ")
        try:
            lev = MCJavaLevel(filename, data)
            lev.compressed = compressed
            return lev
        except Exception, e2:
            raise LoadingError(("Multiple errors encountered", e, e2),
                               sys.exc_info()[2])

    else:
Example No. 54
    def __init__(self, shape=None, root_tag=None, filename=None, mats='Alpha'):
        """ shape is (x,y,z) for a new level's shape.  if none, takes
        root_tag as a TAG_Compound for an existing schematic file.  if
        none, tries to read the tag from filename.  if none, results
        are undefined. materials can be a MCMaterials instance, or one of
        "Classic", "Alpha", "Pocket" to indicate allowable blocks. The default
        is Alpha.

        block coordinate order in the file is y,z,x to use the same code as classic/indev levels.
        in hindsight, this was a completely arbitrary decision.

        the Entities and TileEntities are nbt.TAG_List objects containing TAG_Compounds.
        this makes it easy to copy entities without knowing about their insides.

        rotateLeft swaps the axes of the different arrays.  because of this, the Width, Height, and Length
        reflect the current dimensions of the schematic rather than the ones specified in the NBT structure.
        I'm not sure what happens when I try to re-save a rotated schematic.
        """

        if filename:
            self.filename = filename
            if None is root_tag and os.path.exists(filename):
                root_tag = nbt.load(filename)
        else:
            self.filename = None

        if mats in namedMaterials:
            self.materials = namedMaterials[mats]
        else:
            assert (isinstance(mats, MCMaterials))
            self.materials = mats

        if root_tag:
            self.root_tag = root_tag
            if "Materials" in root_tag:
                self.materials = namedMaterials[self.Materials]
            else:
                root_tag["Materials"] = nbt.TAG_String(self.materials.name)

            w = self.root_tag["Width"].value
            l = self.root_tag["Length"].value
            h = self.root_tag["Height"].value

            self._Blocks = self.root_tag["Blocks"].value.astype('uint16').reshape(h, l, w)  # _Blocks is y, z, x
            del self.root_tag["Blocks"]
            if "AddBlocks" in self.root_tag:
                # Use WorldEdit's "AddBlocks" array to load and store the 4 high bits of a block ID.
                # Unlike Minecraft's NibbleArrays, this array stores the first block's bits in the
                # 4 high bits of the first byte.

                size = (h * l * w)

                # If odd, add one to the size to make sure the adjacent slices line up.
                add = zeros(size + (size & 1), 'uint16')

                # Fill the even bytes with data
                add[::2] = self.root_tag["AddBlocks"].value

                # Copy the low 4 bits to the odd bytes
                add[1::2] = add[::2] & 0xf

                # Shift the even bytes down
                add[::2] >>= 4

                # Shift every byte up before merging it with Blocks
                add <<= 8
                self._Blocks |= add[:size].reshape(h, l, w)
                del self.root_tag["AddBlocks"]

            self.root_tag["Data"].value = self.root_tag["Data"].value.reshape(h, l, w)

            if "Biomes" in self.root_tag:
                self.root_tag["Biomes"].value.shape = (l, w)

        else:
            assert shape is not None
            root_tag = nbt.TAG_Compound(name="Schematic")
            root_tag["Height"] = nbt.TAG_Short(shape[1])
            root_tag["Length"] = nbt.TAG_Short(shape[2])
            root_tag["Width"] = nbt.TAG_Short(shape[0])

            root_tag["Entities"] = nbt.TAG_List()
            root_tag["TileEntities"] = nbt.TAG_List()
            root_tag["Materials"] = nbt.TAG_String(self.materials.name)

            self._Blocks = zeros((shape[1], shape[2], shape[0]), 'uint16')
            root_tag["Data"] = nbt.TAG_Byte_Array(zeros((shape[1], shape[2], shape[0]), uint8))

            root_tag["Biomes"] = nbt.TAG_Byte_Array(zeros((shape[2], shape[0]), uint8))

            self.root_tag = root_tag

        self.root_tag["Data"].value &= 0xF  # discard high bits
Ejemplo n.º 55
0
def fromFile(filename, loadInfinite=True, readonly=False):
    ''' The preferred method for loading Minecraft levels of any type.
    Pass False to loadInfinite if you'd rather not load Infdev levels.
    '''
    log.info(u"Identifying " + filename)

    if not filename:
        raise IOError(u"Cannot load level: no filename given")
    if not os.path.exists(filename):
        raise IOError("File not found: " + filename)

    if ZipSchematic._isLevel(filename):
        log.info("Zipfile found, attempting zipped infinite level")
        lev = ZipSchematic(filename)
        log.info("Detected zipped Infdev level")
        return lev

    if PocketWorld._isLevel(filename):
        return PocketWorld(filename)

    if MCInfdevOldLevel._isLevel(filename):
        log.info(u"Detected Infdev level.dat")
        if loadInfinite:
            return MCInfdevOldLevel(filename=filename, readonly=readonly)
        else:
            raise ValueError(
                "Asked to load {0} which is an infinite level, loadInfinite was False"
                .format(os.path.basename(filename)))

    if PocketLeveldbWorld._isLevel(filename):
        if leveldbpocket.leveldb_available:
            return PocketLeveldbWorld(filename)
        else:
            logging.exception("Pocket support has failed")

    if os.path.isdir(filename):
        logging.exception(
            "World load failed, trying to open a directory instead of a file")

    with open(filename, 'rb') as f:
        rawdata = f.read()
    if len(rawdata) < 4:
        raise ValueError("{0} is too small! ({1}) ".format(
            filename, len(rawdata)))

    data = fromstring(rawdata, dtype='uint8')
    if not data.any():
        raise ValueError(
            "{0} contains only zeroes. This file is damaged beyond repair."
            .format(filename))

    if MCJavaLevel._isDataLevel(data):
        log.info(u"Detected Java-style level")
        lev = MCJavaLevel(filename, data)
        lev.compressed = False
        return lev

    compressed = True
    unzippedData = None
    try:
        unzippedData = nbt.gunzip(rawdata)
    except Exception as e:
        log.info(
            u"Exception during Gzip operation, assuming {0} uncompressed: {1!r}"
            .format(filename, e))
        if unzippedData is None:
            compressed = False
            unzippedData = rawdata

    data = unzippedData
    if MCJavaLevel._isDataLevel(data):
        log.info(u"Detected compressed Java-style level")
        lev = MCJavaLevel(filename, data)
        lev.compressed = compressed
        return lev

    try:
        root_tag = nbt.load(buf=data)

    except Exception as e:
        log.info(u"Error during NBT load: {0!r}".format(e))
        log.info(traceback.format_exc())
        log.info(
            u"Fallback: Detected compressed flat block array, yzx ordered ")
        try:
            lev = MCJavaLevel(filename, data)
            lev.compressed = compressed
            return lev
        except Exception as e2:
            raise LoadingError(("Multiple errors encountered", e, e2),
                               sys.exc_info()[2])

    else:
        if MCIndevLevel._isTagLevel(root_tag):
            log.info(u"Detected Indev .mclevel")
            return MCIndevLevel(root_tag, filename)
        if MCSchematic._isTagLevel(root_tag):
            log.info(u"Detected Schematic.")
            return MCSchematic(filename=filename)

        if INVEditChest._isTagLevel(root_tag):
            log.info(u"Detected INVEdit inventory file")
            return INVEditChest(root_tag=root_tag, filename=filename)

    raise IOError("Cannot detect file type.")
Ejemplo n.º 56
0
    def loadLevelDat(self):
        self.root_tag = nbt.load(os.path.join(self.path, 'level.dat'))
        self.gameVersion = self.root_tag['Data']['Version'].get(
            'Name', nbt.TAG_String('Unknown')).value
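
A standalone sketch of the same lookup, assuming the nbt and os imports used throughout these examples and a hypothetical world directory; older level.dat files may lack the Version compound, hence the fallback:

root_tag = nbt.load(os.path.join("saves/MyWorld", "level.dat"))  # hypothetical path
data = root_tag['Data']
if 'Version' in data and 'Name' in data['Version']:
    print(data['Version']['Name'].value)
else:
    print('Unknown')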
Ejemplo n.º 57
0
    def __init__(self, shape=None, root_tag=None, filename=None, mats='Alpha'):
        """ shape is (x,y,z) for a new level's shape.  if none, takes
        root_tag as a TAG_Compound for an existing schematic file.  if
        none, tries to read the tag from filename.  if none, results
        are undefined. materials can be a MCMaterials instance, or one of
        "Classic", "Alpha", "Pocket" to indicate allowable blocks. The default
        is Alpha.

        block coordinate order in the file is y,z,x to use the same code as classic/indev levels.
        in hindsight, this was a completely arbitrary decision.

        the Entities and TileEntities are nbt.TAG_List objects containing TAG_Compounds.
        this makes it easy to copy entities without knowing about their insides.

        rotateLeft swaps the axes of the different arrays.  because of this, the Width, Height, and Length
        reflect the current dimensions of the schematic rather than the ones specified in the NBT structure.
        I'm not sure what happens when I try to re-save a rotated schematic.
        """

        if filename:
            self.filename = filename
            if None is root_tag and os.path.exists(filename):
                root_tag = nbt.load(filename)
        else:
            self.filename = None

        if mats in namedMaterials:
            self.materials = namedMaterials[mats]
        else:
            assert (isinstance(mats, MCMaterials))
            self.materials = mats

        if root_tag:
            self.root_tag = root_tag
            if "Materials" in root_tag:
                self.materials = namedMaterials[self.Materials]
            else:
                root_tag["Materials"] = nbt.TAG_String(self.materials.name)
            self.shapeChunkData()

        else:
            assert shape is not None
            root_tag = nbt.TAG_Compound(name="Schematic")
            root_tag["Height"] = nbt.TAG_Short(shape[1])
            root_tag["Length"] = nbt.TAG_Short(shape[2])
            root_tag["Width"] = nbt.TAG_Short(shape[0])

            root_tag["Entities"] = nbt.TAG_List()
            root_tag["TileEntities"] = nbt.TAG_List()
            root_tag["Materials"] = nbt.TAG_String(self.materials.name)

            root_tag["Blocks"] = nbt.TAG_Byte_Array(
                zeros((shape[1], shape[2], shape[0]), uint8))
            root_tag["Data"] = nbt.TAG_Byte_Array(
                zeros((shape[1], shape[2], shape[0]), uint8))

            self.root_tag = root_tag

        self.packUnpack()
        self.root_tag["Data"].value &= 0xF  # discard high bits
Ejemplo n.º 58
0
def load_file():
    global test_file
    test_file = nbt.load(buf=test_data)