Exemplo n.º 1
0
    def get_items(self, name):
        """Return item counts for the named storage area (5-minute cache).

        Scans every region file overlapped by the storage box and tallies
        the items of all container tile entities inside it.
        """
        # Serve a cached result while it is younger than 300 seconds.
        cached = self.cache.get(name)
        if cached is not None and time.time() - cached["last_update"] < 300:
            return cached["items"]

        storage = self.storage_dict[name]
        region_folder = os.path.join(
            self.world_folder, self.dimension_dict[storage["dimension"]])
        x1, y1, z1 = storage["pos1"]
        x2, y2, z2 = storage["pos2"]

        items = {}
        # Region files cover 512x512 blocks; visit every region the box touches.
        for rx in range(x1 // 512, x2 // 512 + 1):
            for rz in range(z1 // 512, z2 // 512 + 1):
                region_path = os.path.join(region_folder, f"r.{rx}.{rz}.mca")
                region = RegionFile(region_path)

                for chunk in region.iter_chunks():
                    try:
                        for entity in chunk["Level"]["TileEntities"]:
                            if ("Items" in entity
                                    and x1 <= entity["x"].value <= x2
                                    and y1 <= entity["y"].value <= y2
                                    and z1 <= entity["z"].value <= z2):
                                items = self.get_items_from_nbt(entity, items)
                    except KeyError:
                        # Chunk lacks Level/TileEntities tags: nothing to count.
                        continue

        self.cache[name] = {"last_update": time.time(), "items": items}
        return items
Exemplo n.º 2
0
 def setUp(self):
     """Build an in-memory region file: an 8 KiB header plus one tiny chunk."""
     header = b'\x00\x00\x02\x01' + 8188 * b'\x00'
     chunk = (b'\x00\x00\x00\x27\x02\x78\xda\xe3\x62\x60\x71\x49\x2c\x49\x64\x61\x60\x09\xc9\xcc\x4d'
              b'\x65\x80\x00\x46\x0e\x06\x16\xbf\x44\x20\x97\x25\x24\xb5\xb8\x84\x01\x00\x6b\xb7\x06\x52')
     data = header + chunk
     self.length = 8235
     self.assertEqual(len(data), self.length)
     stream = BytesIO(data)
     stream.seek(0)
     self.region = RegionFile(fileobj=stream)
Exemplo n.º 3
0
def process_region_file(filename, start, stop):
    """Given a region filename, return the number of blocks of each ID in that file.

    Parameters:
        filename -- region file path of the form "r.<rx>.<rz>.mca".
        start    -- optional (x, y, z) lower bound of a bounding box, or None.
        stop     -- optional (x, y, z) upper bound of a bounding box, or None.

    Returns a 256x16 nested list: result[block_id][data_value] = block count.
    """
    pieces = filename.split('.')
    rx = int(pieces[1])  # region coordinates are encoded in the file name
    rz = int(pieces[2])

    # Up to 16 data values for each of 256 block IDs.
    block_data_totals = [[0] * 16 for _ in range(256)]

    # Skip the region entirely if it cannot overlap the bounding box.
    # NOTE(review): when BOTH start and stop are given, only the start bound
    # is tested here (and per chunk below) because of the elif — confirm that
    # this matches the intended bounding-box semantics.
    if start is not None:
        if ((rx + 1) * 512 - 1 < int(start[0])
                or (rz + 1) * 512 - 1 < int(start[2])):
            return block_data_totals
    elif stop is not None:
        if rx * 512 - 1 > int(stop[0]) or rz * 512 - 1 > int(stop[2]):
            return block_data_totals

    region_file = RegionFile(filename)  # renamed: 'file' shadowed the builtin

    # Get all chunks
    chunks = region_file.get_chunks()
    print("Parsing %s... %d chunks" %
          (os.path.basename(filename), len(chunks)))
    for c in chunks:
        # Skip chunks outside the bounding box.
        if start is not None:
            if ((c['x'] + 1) * 16 + rx * 512 - 1 < int(start[0])
                    or (c['z'] + 1) * 16 + rz * 512 - 1 < int(start[2])):
                continue
        elif stop is not None:
            if (c['x'] * 16 + rx * 512 - 1 > int(stop[0])
                    or c['z'] * 16 + rz * 512 - 1 > int(stop[2])):
                continue

        chunk = Chunk(region_file.get_chunk(c['x'], c['z']))
        assert chunk.get_coords() == (c['x'] + rx * 32, c['z'] + rz * 32)

        # Fast path when no bounding box is given.
        # TODO: also use this code if start/stop is specified, but the complete chunk is included
        if start is None and stop is None:
            stats_per_chunk(chunk, block_data_totals)
        else:
            # Slow path: iterate each coordinate within the box.
            bounded_stats_per_chunk(chunk, block_data_totals, start, stop)

    return block_data_totals
Exemplo n.º 4
0
def main(world_path: str, check: bool = False):
    """Scan every known dimension for tile entities stored with coordinates
    outside their owning chunk; unless *check* is set, write the fixes back.
    """
    total_found = 0

    for world, folder in WORLDS.items():
        print(f"Checking the {world}")
        world_folder = AnvilWorldFolder(path.join(world_path, folder))

        regions = world_folder.regionfiles
        if not regions:
            print(f"Couldn't find region files for the {world}, skipping")
            continue

        with Bar("Checking Regions", fill="█", max=len(regions)) as bar:
            for (region_x, region_z), region_path in regions.items():
                region = RegionFile(region_path)

                for chunk in region.get_metadata():
                    chunk_x = region_x * 32 + chunk.x
                    chunk_z = region_z * 32 + chunk.z

                    nbt = world_folder.get_nbt(chunk_x, chunk_z)
                    found_errors = False

                    for entity in nbt["Level"]["TileEntities"]:
                        if in_chunk(chunk_x, chunk_z, entity['x'],
                                    entity['z']):
                            continue
                        total_found += 1
                        found_errors = True
                        # Snap the entity back into this chunk, keeping its
                        # offset within the 16x16 chunk footprint.
                        entity["x"].value = chunk_x * 16 + (
                            to_int(entity['x']) % 16)
                        entity["z"].value = chunk_z * 16 + (
                            to_int(entity['z']) % 16)

                    if found_errors and not check:
                        region.write_chunk(chunk.x, chunk.z, nbt)

                bar.next()

        print(
            f"{ 'Found' if check else 'Fixed'} {total_found} entities with wrong coordinates"
        )
Exemplo n.º 5
0
    def analyse(self, region):
        """Scan one region file: populate region.chunks with per-chunk scan
        results, then reclassify wrong-located chunks that share sector
        offsets.

        Returns the same *region* object with status, scan_time and scanned
        updated; on a missing header or unreadable file only the status is
        set.
        """
        # FIX: corrected the "Analse" typo in the debug message.
        logger.debug("Start Analyse Region File %s", region.filename)
        try:
            region_file = RegionFile(region.path)
            # Scan every chunk slot of the 32x32 region grid.
            for x in range(32):
                for z in range(32):
                    chunk = models.MCRegionFileChunk(region, x, z)
                    # Guard kept from the original; MCRegionFileChunk may
                    # define custom truthiness — TODO confirm.
                    if chunk:
                        chunk.scan_results.extend(
                            self.scan_chunk(region_file, chunk))
                        region.chunks[(x, z)] = chunk

            # Now check for chunks sharing offsets:
            # Please note! region.py will mark both overlapping chunks
            # as bad (the one stepping outside his territory and the
            # good one). Only wrong located chunk with a overlapping
            # flag are really BAD chunks! Use this criterion to
            # discriminate
            metadata = region_file.metadata
            sharing = [
                k for k in metadata
                if (metadata[k].status == STATUS_CHUNK_OVERLAPPING
                    and region.chunks[k].scan_results.isErrorExists(
                        models.CHUNK_WRONG_LOCATED))
            ]
            for k in sharing:
                region.chunks[k].scan_results.append(
                    models.CHUNK_SHARED_OFFSET)
                region.chunks[k].scan_results.remove(
                    models.CHUNK_WRONG_LOCATED)

            # Counting loop replaced: the count is just the list length.
            region.shared_offset = len(sharing)
            del region_file
        except NoRegionHeader:  # The region has no header
            region.status = models.REGION_TOO_SMALL
        except IOError:
            region.status = models.REGION_UNREADABLE

        region.scan_time = time.time()
        region.scanned = True
        return region
Exemplo n.º 6
0
def process_region(reg, callback):
    """Apply *callback* to every command-block tile entity in a region file.

    Parameters:
        reg      -- region file path (or whatever RegionFile accepts).
        callback -- called with each 'minecraft:command_block' NBT entity;
                    return a truthy value to mark the containing chunk as
                    modified.

    Chunks containing at least one modified entity are written back to disk.
    """
    reg_nbt = RegionFile(reg)
    # FIX: close the region file even when an exception interrupts the scan
    # (the original leaked the file handle on error).
    try:
        for m in reg_nbt.get_metadata():
            chunk = reg_nbt.get_chunk(m.x, m.z)
            tile_entities = chunk['Level']['TileEntities']

            chunk_needs_update = False

            for ent in tile_entities:
                if ent['id'].value == 'minecraft:command_block':
                    if callback(ent):
                        chunk_needs_update = True

            if chunk_needs_update:
                reg_nbt.write_chunk(m.x, m.z, chunk)
    finally:
        reg_nbt.close()
Exemplo n.º 7
0
    def __init__(self, filename):
        """Render a top-down map of one region file and save it as a PNG.

        Parameters:
            filename -- region file name, resolved against the module-level
                        regionPath; the image is written next to outputPath.
        """
        # FIX: Python 3 print function (original used the Python 2 statement
        # form, a syntax error under Python 3).
        print("Processing region file " + filename)
        colormap = BlockColorMap()
        # FIX: 'with' closes the file even if rendering fails (and avoids
        # shadowing the builtin 'file' name).
        with open(regionPath + filename, 'rb') as region_fileobj:
            region = RegionFile(fileobj=region_fileobj)

            # One region is 32x32 chunks of 16x16 columns -> 512x512 pixels.
            img = Image.new('RGBA', (32 * 16, 32 * 16))
            for c in region.get_chunks():
                cx = c['x']
                cz = c['z']
                chunk = Chunk(region, cx, cz)
                # Skip chunks that are not fully generated yet.
                if chunk.status != 'postprocessed':
                    continue
                for x in range(0, 16):
                    for z in range(0, 16):
                        col = chunk.getColumnInfo(x, z)
                        color = colormap.getColor(col)
                        img.putpixel((cx * 16 + x, cz * 16 + z), color)
            img.save(outputPath + filename + ".png")
Exemplo n.º 8
0
def analyse_regionfile(filename, warnings=True):
    """Check a Minecraft region file for structural problems.

    Verifies file size, chunk headers, timestamps, sector allocation,
    compression flags, NBT framing, overlapping chunks and unused sectors.

    Parameters:
        filename -- path of the region file to inspect.
        warnings -- when True, also report non-fatal issues (fragmentation,
                    non-zeroed padding, over-allocated sectors).

    Returns a tuple (errors, statuscounts, sectors, chunks):
        errors       -- list of human-readable problem descriptions.
        statuscounts -- Statuses tally of per-chunk status codes.
        sectors      -- per-sector description of each 4096-byte sector.
        chunks       -- dict mapping (x, z) to ChunkMetadata.
    """
    region = RegionFile(filename)

    statuscounts = Statuses()
    errors = []
    if region.size % 4096 != 0:
        errors.append(
            "File size is %d bytes, which is not a multiple of 4096" %
            region.size)
    sectorsize = region._bytes_to_sector(region.size)
    sectors = sectorsize * [None]
    if region.size == 0:
        errors.append("File size is 0 bytes")
        sectors = []
    elif sectorsize < 2:
        errors.append(
            "File size is %d bytes, too small for the 8192 byte header" %
            region.size)
    else:
        sectors[0] = "locations"
        sectors[1] = "timestamps"
    chunks = {}
    for x in range(32):
        for z in range(32):
            c = ChunkMetadata(x, z)
            (c.sectorstart, c.sectorlen, c.timestamp,
             status) = region.header[x, z]
            (c.length, c.compression, c.status) = region.chunk_headers[x, z]
            c.uncompressedlength = 0
            chunks[x, z] = c

            statuscounts.count(c.status)
            if c.status < 0:
                errors.append("chunk %d,%d has status %d: %s" % \
                    (x, z, c.status, statuscounts.get_name(c.status)))

            try:
                if c.sectorstart == 0:
                    if c.sectorlen != 0:
                        errors.append(
                            "chunk %d,%d is not created, but is %d sectors in length"
                            % (x, z, c.sectorlen))
                    if c.timestamp != 0:
                        errors.append(
                            "chunk %d,%d is not created, but has timestamp %d"
                            % (x, z, c.timestamp))
                    raise RegionFileFormatError('')
                allocatedbytes = 4096 * c.sectorlen
                if c.timestamp == 0:
                    errors.append("chunk %d,%d has no timestamp" % (x, z))
                if c.sectorstart < 2:
                    errors.append(
                        "chunk %d,%d starts at sector %d, which is in the header"
                        % (x, z, c.sectorstart))
                    raise RegionFileFormatError('')
                if 4096 * c.sectorstart >= region.size:
                    errors.append(
                        "chunk %d,%d starts at sector %d, while the file is only %d sectors"
                        % (x, z, c.sectorstart, sectorsize))
                    raise RegionFileFormatError('')
                elif 4096 * c.sectorstart + 5 > region.size:
                    # header of chunk only partially fits
                    # FIX: the format string has five placeholders but the
                    # original supplied only four values (TypeError when this
                    # branch fired); the bytes-present count was missing.
                    errors.append(
                        "chunk %d,%d starts at sector %d, but only %d bytes of sector %d are present in the file"
                        % (x, z, c.sectorstart,
                           region.size - 4096 * c.sectorstart, c.sectorstart))
                    raise RegionFileFormatError('')
                elif not c.length:
                    errors.append("chunk %d,%d length is undefined." % (x, z))
                    raise RegionFileFormatError('')
                elif c.length == 1:
                    # length field counts the compression byte, so a length
                    # of 1 means a zero-byte payload.
                    errors.append("chunk %d,%d has length 0 bytes." % (x, z))
                elif 4096 * c.sectorstart + 4 + c.length > region.size:
                    # header of chunk fits, but not the complete chunk
                    errors.append(
                        "chunk %d,%d is %d bytes in length, which is behind the file end"
                        % (x, z, c.length))
                requiredsectors = region._bytes_to_sector(c.length + 4)
                if c.sectorlen <= 0:
                    errors.append("chunk %d,%d is %d sectors in length" %
                                  (x, z, c.sectorlen))
                    raise RegionFileFormatError('')
                if c.compression == 0:
                    errors.append(
                        "chunk %d,%d is uncompressed. This is deprecated." %
                        (x, z))
                elif c.compression == 1:
                    errors.append(
                        "chunk %d,%d uses GZip compression. This is deprecated."
                        % (x, z))
                elif c.compression > 2:
                    errors.append(
                        "chunk %d,%d uses an unknown compression type (%d)." %
                        (x, z, c.compression))
                if c.length + 4 > allocatedbytes:  # TODO 4 or 5?
                    errors.append("chunk %d,%d is %d bytes (4+1+%d) and requires %d sectors, " \
                        "but only %d %s allocated" % \
                        (x, z, c.length+4, c.length-1, requiredsectors, c.sectorlen, \
                        "sector is" if (c.sectorlen == 1) else "sectors are"))
                elif c.length + 4 + 4096 == allocatedbytes:
                    # If the block fits in exactly n sectors, Minecraft seems to allocated n+1 sectors
                    # Threat this as a warning instead of an error.
                    if warnings:
                        errors.append("chunk %d,%d is %d bytes (4+1+%d) and requires %d %s, " \
                            "but %d sectors are allocated" % \
                            (x, z, c.length+4, c.length-1, requiredsectors, \
                            "sector" if (requiredsectors == 1) else "sectors", c.sectorlen))
                elif c.sectorlen > requiredsectors:
                    errors.append("chunk %d,%d is %d bytes (4+1+%d) and requires %d %s, " \
                        "but %d sectors are allocated" % \
                        (x, z, c.length+4, c.length-1, requiredsectors, \
                        "sector" if (requiredsectors == 1) else "sectors", c.sectorlen))

                # Decompress chunk, check if that succeeds.
                # Check if the header and footer indicate this is a NBT file.
                # (without parsing it in detail)
                compresseddata = None
                data = None
                try:
                    if 0 <= c.compression <= 2:
                        region.file.seek(4096 * c.sectorstart + 5)
                        compresseddata = region.file.read(c.length - 1)
                except Exception as e:
                    errors.append("Error reading chunk %d,%d: %s" %
                                  (x, z, str(e)))
                if (c.compression == 0):
                    data = compresseddata
                if (c.compression == 1):
                    try:
                        data = gzip.decompress(compresseddata)
                    except Exception as e:
                        errors.append(
                            "Error decompressing chunk %d,%d using gzip: %s" %
                            (x, z, str(e)))
                elif (c.compression == 2):
                    try:
                        data = zlib.decompress(compresseddata)
                    except Exception as e:
                        errors.append(
                            "Error decompressing chunk %d,%d using zlib: %s" %
                            (x, z, str(e)))
                if data:
                    c.uncompressedlength = len(data)
                    if data[0] != 10:
                        errors.append(
                            "chunk %d,%d is not a valid NBT file: outer object is not a TAG_Compound, but %r"
                            % (x, z, data[0]))
                    elif data[-1] != 0:
                        errors.append(
                            "chunk %d,%d is not a valid NBT file: files does not end with a TAG_End."
                            % (x, z))
                    else:
                        (length, ) = unpack(">H", data[1:3])
                        name = data[3:3 + length]
                        try:
                            name.decode("utf-8", "strict")
                        except Exception as e:
                            # FIX: the original message wrongly blamed
                            # "unknown compression"; this is a root-tag-name
                            # UTF-8 decode failure.
                            errors.append(
                                "chunk %d,%d root tag name is not valid UTF-8: %s"
                                % (x, z, str(e)))

                if warnings:
                    # Read the unused bytes in a sector and check if all bytes are zeroed.
                    unusedlen = 4096 * c.sectorlen - (c.length + 4)
                    if unusedlen > 0:
                        try:
                            region.file.seek(4096 * c.sectorstart + 4 +
                                             c.length)
                            unused = region.file.read(unusedlen)
                            zeroes = unused.count(b'\x00')
                            if zeroes < unusedlen:
                                errors.append("%d of %d unused bytes are not zeroed in sector %d after chunk %d,%d" % \
                                    (unusedlen-zeroes, unusedlen, c.sectorstart + c.sectorlen - 1, x, z))
                        except Exception as e:
                            errors.append(
                                "Error reading tail of chunk %d,%d: %s" %
                                (x, z, str(e)))

            except RegionFileFormatError:
                pass

            if c.sectorlen and c.sectorstart:
                # Check for overlapping chunks
                for b in range(c.sectorlen):
                    m = "chunk %-2d,%-2d part %d/%d" % (x, z, b + 1,
                                                        c.sectorlen)
                    p = c.sectorstart + b
                    # FIX: off-by-one — valid indices are 0..sectorsize-1, so
                    # p == sectorsize is already outside the file (the
                    # original 'p > sectorsize' would IndexError below).
                    if p >= sectorsize:
                        errors.append("%s outside file" % (m))
                        break
                    if sectors[p] is not None:
                        errors.append("overlap in sector %d: %s and %s" %
                                      (p, sectors[p], m))
                    if (b == 0):
                        if (c.uncompressedlength > 0):
                            m += " (4+1+%d bytes compressed: %d bytes uncompressed)" % (
                                c.length - 1, c.uncompressedlength)
                        elif c.length:
                            m += " (4+1+%d bytes compressed)" % (c.length - 1)
                        else:
                            m += " (4+1+0 bytes)"
                    if sectors[p] is not None:
                        m += " (overlapping!)"
                    sectors[p] = m

    e = sectors.count(None)
    if e > 0:
        if warnings:
            errors.append("Fragementation: %d of %d sectors are unused" %
                          (e, sectorsize))
        for sector, content in enumerate(sectors):
            if content is None:
                sectors[sector] = "empty"
                if warnings:
                    region.file.seek(4096 * sector)
                    unused = region.file.read(4096)
                    zeroes = unused.count(b'\x00')
                    if zeroes < 4096:
                        errors.append(
                            "%d bytes are not zeroed in unused sector %d" %
                            (4096 - zeroes, sector))

    return errors, statuscounts, sectors, chunks
Exemplo n.º 9
0
 def setUp(self):
     """Open a throwaway copy of the test region file in a fresh temp dir."""
     workdir = tempfile.mkdtemp()
     self.tempdir = workdir
     self.filename = os.path.join(workdir, 'regiontest.mca')
     shutil.copy(REGIONTESTFILE, self.filename)
     self.region = RegionFile(filename=self.filename)
Exemplo n.º 10
0
 def test02WriteFile(self):
     """Writing one chunk to an empty region grows it to three sectors."""
     region = RegionFile(fileobj=self.stream)
     level = self.generate_level()
     region.write_chunk(0, 0, level)
     self.assertEqual(region.chunk_count(), 1)
     self.assertEqual(region.get_size(), 3 * 4096)
Exemplo n.º 11
0
 def test01ReadFile(self):
     """A freshly opened empty region stream reports zero chunks."""
     reg = RegionFile(fileobj=self.stream)
     self.assertEqual(reg.chunk_count(), 0)