Esempio n. 1
0
    def test_name_for_region(self):
        """
        Region filenames match the expected values listed in the comments
        of RegionFile.java.
        """

        cases = [
            ((30, -3), "r.0.-1.mcr"),
            ((70, -30), "r.2.-1.mcr"),
        ]
        for (cx, cz), expected in cases:
            self.assertEqual(name_for_region(cx, cz), expected)
Esempio n. 2
0
File: beta.py Progetto: miea/bravo
    def load_chunk(self, chunk):
        """
        Load a chunk's data from its MCR region file into the given chunk
        object.

        Returns None (leaving the chunk untouched) if the region file does
        not exist, the chunk has no header entry, the chunk was never
        allocated, or the compression version is unrecognized.
        """

        region = name_for_region(chunk.x, chunk.z)
        fp = self.folder.child("region").child(region)
        if not fp.exists():
            return

        # Chunk coordinates within the 32x32-chunk region.
        x, z = chunk.x % 32, chunk.z % 32

        if region not in self.regions:
            self.cache_region_pages(region)

        positions = self.regions[region][0]

        if (x, z) not in positions:
            return

        position, pages = positions[x, z]

        # A zero offset or zero page count means the chunk was never
        # written to this region.
        if not position or not pages:
            return

        # Region files are organized in 4096-byte pages.
        with fp.open("r") as handle:
            handle.seek(position * 4096)
            data = handle.read(pages * 4096)

        # The four-byte length field counts the version byte plus the
        # payload, so the payload itself is one byte shorter.
        length = unpack(">L", data[:4])[0] - 1
        version = ord(data[4])

        data = data[5:length+5]
        if version == 1:
            # Version 1 payloads are gzip-compressed. GzipFile performs the
            # decompression itself, so the raw payload must be handed over
            # as-is; decompressing it first would make GzipFile choke.
            fileobj = GzipFile(fileobj=StringIO(data))
        elif version == 2:
            # Version 2 payloads are zlib-compressed.
            fileobj = StringIO(data.decode("zlib"))
        else:
            # Unknown compression scheme; nothing sensible can be done.
            return

        tag = NBTFile(buffer=fileobj)

        return self._load_chunk_from_tag(chunk, tag)
Esempio n. 3
0
File: beta.py Progetto: JDShu/bravo
    def load_chunk(self, chunk):
        """
        Load a chunk's data from its MCR region file into the given chunk
        object.

        Returns None (leaving the chunk untouched) if the region file does
        not exist, the chunk has no header entry, the chunk was never
        allocated, or the compression version is unrecognized.
        """

        region = name_for_region(chunk.x, chunk.z)
        fp = self.folder.child("region").child(region)
        if not fp.exists():
            return

        # Chunk coordinates within the 32x32-chunk region.
        x, z = chunk.x % 32, chunk.z % 32

        if region not in self.regions:
            self.cache_region_pages(region)

        positions = self.regions[region][0]

        if (x, z) not in positions:
            return

        position, pages = positions[x, z]

        # A zero offset or zero page count means the chunk was never
        # written to this region.
        if not position or not pages:
            return

        # Region files are organized in 4096-byte pages.
        with fp.open("r") as handle:
            handle.seek(position * 4096)
            data = handle.read(pages * 4096)

        # The four-byte length field counts the version byte plus the
        # payload, so the payload itself is one byte shorter.
        length = unpack(">L", data[:4])[0] - 1
        version = ord(data[4])

        data = data[5:length+5]
        if version == 1:
            # Version 1 payloads are gzip-compressed. GzipFile performs the
            # decompression itself, so the raw payload must be handed over
            # as-is; decompressing it first would make GzipFile choke.
            fileobj = GzipFile(fileobj=StringIO(data))
        elif version == 2:
            # Version 2 payloads are zlib-compressed.
            fileobj = StringIO(data.decode("zlib"))
        else:
            # Unknown compression scheme; nothing sensible can be done.
            return

        tag = NBTFile(buffer=fileobj)

        return self._load_chunk_from_tag(chunk, tag)
Esempio n. 4
0
    def save_chunk(self, chunk):
        """
        Serialize a chunk and write it into its MCR region file,
        allocating or reallocating 4096-byte pages as needed.

        Creates the region directory and a zeroed region header if they do
        not yet exist.
        """

        tag = self._save_chunk_to_tag(chunk)
        b = StringIO()
        tag.write_file(buffer=b)
        data = b.getvalue().encode("zlib")

        region = name_for_region(chunk.x, chunk.z)
        fp = self.folder.child("region")
        if not fp.exists():
            fp.makedirs()
        fp = fp.child(region)
        if not fp.exists():
            # Create the file and zero out the header, plus a spare page for
            # Notchian software. The context manager guarantees the handle
            # is closed even if the write fails.
            with fp.open("w") as handle:
                handle.write("\x00" * 8192)

        if region not in self.regions:
            self.cache_region_pages(region)

        positions = self.regions[region][0]

        # Chunk coordinates within the 32x32-chunk region.
        x, z = chunk.x % 32, chunk.z % 32

        if (x, z) in positions:
            position, pages = positions[x, z]
        else:
            position, pages = 0, 0

        # Pack up the data, all ready to go. The length field counts the
        # version byte (\x02 marks zlib compression) plus the payload.
        data = "%s\x02%s" % (pack(">L", len(data) + 1), data)
        needed_pages = (len(data) + 4095) // 4096

        # I should comment this, since it's not obvious in the original MCR
        # code either. The reason that we might want to reallocate pages if we
        # have shrunk, and not just grown, is that it allows the region to
        # self-vacuum somewhat by reusing single unused pages near the
        # beginning of the file. While this isn't an absolute guarantee, the
        # potential savings, and the guarantee that sometime during this
        # method we *will* be blocking, makes it worthwhile computationally.
        # This is a lot cheaper than an explicit vacuum, by the way!
        if not position or not pages or pages != needed_pages:
            free_pages = self.regions[region][1]

            # Deallocate our current home.
            for i in xrange(pages):
                free_pages.add(position + i)

            # Find a new home for us.
            found = False
            for candidate in sorted(free_pages):
                if all(candidate + i in free_pages
                    for i in range(needed_pages)):
                        # Excellent.
                        position = candidate
                        found = True
                        break

            # If we couldn't find a reusable run of pages, we should just go
            # to the end of the file.
            if not found:
                position = (fp.getsize() + 4095) // 4096

            # And allocate our new home.
            for i in xrange(needed_pages):
                free_pages.discard(position + i)

        pages = needed_pages

        positions[x, z] = position, pages

        # Write our payload. Opening the file only now, and inside a context
        # manager, means the handle cannot leak if allocation or a write
        # raises.
        with fp.open("r+") as handle:
            handle.seek(position * 4096)
            handle.write(data)

            # Write our position and page count.
            offset = 4 * (x + z * 32)
            position = position << 8 | pages
            handle.seek(offset)
            handle.write(pack(">L", position))
Esempio n. 5
0
File: beta.py Progetto: JDShu/bravo
    def save_chunk(self, chunk):
        """
        Serialize a chunk and write it into its MCR region file,
        allocating or reallocating 4096-byte pages as needed.

        Creates the region directory and a zeroed region header if they do
        not yet exist.
        """
        tag = self._save_chunk_to_tag(chunk)
        b = StringIO()
        tag.write_file(buffer=b)
        data = b.getvalue().encode("zlib")

        region = name_for_region(chunk.x, chunk.z)
        fp = self.folder.child("region")
        if not fp.exists():
            fp.makedirs()
        fp = fp.child(region)
        if not fp.exists():
            # Create the file and zero out the header, plus a spare page for
            # Notchian software.
            with fp.open("w") as handle:
                handle.write("\x00" * 8192)

        if region not in self.regions:
            self.cache_region_pages(region)

        positions = self.regions[region][0]

        # Chunk coordinates within the 32x32-chunk region.
        x, z = chunk.x % 32, chunk.z % 32

        if (x, z) in positions:
            position, pages = positions[x, z]
        else:
            # Chunk not yet present; zeroes force the allocation branch
            # below to find it a home.
            position, pages = 0, 0

        # Pack up the data, all ready to go. The length field counts the
        # version byte (\x02 marks zlib compression) plus the payload.
        data = "%s\x02%s" % (pack(">L", len(data) + 1), data)
        needed_pages = (len(data) + 4095) // 4096

        # I should comment this, since it's not obvious in the original MCR
        # code either. The reason that we might want to reallocate pages if we
        # have shrunk, and not just grown, is that it allows the region to
        # self-vacuum somewhat by reusing single unused pages near the
        # beginning of the file. While this isn't an absolute guarantee, the
        # potential savings, and the guarantee that sometime during this
        # method we *will* be blocking, makes it worthwhile computationally.
        # This is a lot cheaper than an explicit vacuum, by the way!
        if not position or not pages or pages != needed_pages:
            free_pages = self.regions[region][1]

            # Deallocate our current home.
            for i in xrange(pages):
                free_pages.add(position + i)

            # Find a new home for us: the earliest contiguous run of free
            # pages long enough for the new payload.
            found = False
            for candidate in sorted(free_pages):
                if all(candidate + i in free_pages
                    for i in range(needed_pages)):
                        # Excellent.
                        position = candidate
                        found = True
                        break

            # If we couldn't find a reusable run of pages, we should just go
            # to the end of the file.
            if not found:
                position = (fp.getsize() + 4095) // 4096

            # And allocate our new home.
            for i in xrange(needed_pages):
                free_pages.discard(position + i)

        pages = needed_pages

        positions[x, z] = position, pages

        # Write our payload.
        with fp.open("r+") as handle:
            handle.seek(position * 4096)
            handle.write(data)

            # Write our position and page count: a packed big-endian word
            # holding (page offset << 8 | page count) at the chunk's slot in
            # the region header.
            offset = 4 * (x + z * 32)
            position = position << 8 | pages
            handle.seek(offset)
            handle.write(pack(">L", position))