Example 1
 def _decompress(compress_type: int, data: bytes) -> nbt.NBTFile:
     """Convert a bytes object into an NBTFile"""
     if compress_type == world_utils.VERSION_GZIP:
         return nbt.load(buffer=gzip.decompress(data), compressed=False)
     elif compress_type == world_utils.VERSION_DEFLATE:
         return nbt.load(buffer=zlib.decompress(data), compressed=False)
     raise ChunkLoadError(f"Invalid compression type {compress_type}")
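The compression-type byte this helper switches on comes from the region-file chunk record, which begins with a 4-byte big-endian payload length followed by one compression-type byte (1 for gzip, 2 for zlib); presumably that is what world_utils.VERSION_GZIP and VERSION_DEFLATE name. A minimal stand-alone sketch of splitting such a record with only the standard library (the record variable and helper name are hypothetical):

import gzip
import struct
import zlib


def split_chunk_record(raw_record: bytes) -> bytes:
    """Decompress the chunk payload of a raw region-file record."""
    length = struct.unpack(">I", raw_record[:4])[0]  # length counts the compression byte
    compress_type = raw_record[4]
    payload = raw_record[5 : 4 + length]
    if compress_type == 1:  # gzip (rare in practice)
        return gzip.decompress(payload)
    elif compress_type == 2:  # zlib / deflate (the common case)
        return zlib.decompress(payload)
    raise ValueError(f"Invalid compression type {compress_type}")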
Example 2
    def read(self, section_index: int):
        if self._format_version == 0:
            (
                sx,
                sy,
                sz,
                shapex,
                shapey,
                shapez,
                position,
                length,
            ) = self._section_index_table[section_index]
            self._buffer.seek(position)
            nbt_obj = amulet_nbt.load(buffer=self._buffer.read(length))
            if nbt_obj["blocks_array_type"].value == -1:
                blocks = None
                block_entities = None
            else:
                blocks = numpy.reshape(nbt_obj["blocks"].value,
                                       (shapex, shapey, shapez))
                block_entities = self._parse_block_entities(
                    nbt_obj["block_entities"])

            return ConstructionSection(
                (sx, sy, sz),
                (shapex, shapey, shapez),
                blocks,
                self._palette,
                self._parse_entities(nbt_obj["entities"]),
                block_entities,
            )
        else:
            raise Exception(
                f"This wrapper doesn't support any construction version higher than {max_format_version}"
            )
Example 3
 def _load_level_dat(self):
     """Load the level.dat file and check the image file"""
     self.root_tag = nbt.load(filename=os.path.join(self.path, "level.dat"))
     if os.path.isfile(os.path.join(self.path, "icon.png")):
         self._world_image_path = os.path.join(self.path, "icon.png")
     else:
         self._world_image_path = self._missing_world_icon
Example 4
    def _load_palette_blocks(
        data,
    ) -> Tuple[numpy.ndarray, List[amulet_nbt.NBTFile], bytes]:
        # Ignore the LSB of data[0] (it's a flag) and get the compacting level
        bits_per_block, data = data[0] >> 1, data[1:]
        blocks_per_word = 32 // bits_per_block  # Word = 4 bytes, basis of compacting.
        word_count = -(-4096 // blocks_per_word)  # Ceiling divide is inverted floor divide

        blocks = numpy.packbits(
            numpy.pad(
                numpy.unpackbits(
                    numpy.frombuffer(
                        bytes(reversed(data[: 4 * word_count])), dtype="uint8"
                    )
                )
                .reshape(-1, 32)[:, -blocks_per_word * bits_per_block :]
                .reshape(-1, bits_per_block)[-4096:, :],
                [(0, 0), (16 - bits_per_block, 0)],
                "constant",
            )
        ).view(dtype=">i2")[::-1]
        blocks = blocks.reshape((16, 16, 16)).swapaxes(1, 2)

        data = data[4 * word_count :]

        palette_len, data = struct.unpack("<I", data[:4])[0], data[4:]
        palette, offset = amulet_nbt.load(
            buffer=data,
            compressed=False,
            count=palette_len,
            offset=True,
            little_endian=True,
        )

        return blocks, palette, data[offset:]
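The -(-a // b) idiom above is ceiling division spelled with floor division. A quick worked check of the word arithmetic for the packed block array (the bit width of 5 is only an illustrative value):

import math

bits_per_block = 5                        # illustrative; the real value comes from the header byte
blocks_per_word = 32 // bits_per_block    # 6 blocks fit in each 4-byte word
word_count = -(-4096 // blocks_per_word)  # ceiling division via negated floor division
assert word_count == math.ceil(4096 / blocks_per_word) == 683
assert 4 * word_count == 2732             # bytes of packed block data consumed for one sub-chunk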
Example 5
def load_leveldat(in_dir: str) -> nbt.NBTFile:
    """
    Load the root tag of the level.dat file in the directory

    :param in_dir: The world directory containing the level.dat file
    :return: The NBT root tag
    """
    return nbt.load(filename=os.path.join(in_dir, "level.dat"))
Example 6
 def _unpack_nbt_list(raw_nbt: bytes) -> List[amulet_nbt.NBTFile]:
     nbt_list = []
     while raw_nbt:
         nbt, index = amulet_nbt.load(buffer=raw_nbt,
                                      little_endian=True,
                                      offset=True)
         raw_nbt = raw_nbt[index:]
         nbt_list.append(nbt)
     return nbt_list
Example 7
 def _get_raw_player_data(self, player_id: str) -> nbt.NBTFile:
     if player_id == LOCAL_PLAYER:
         key = player_id.encode("utf-8")
     else:
         key = f"player_{player_id}".encode("utf-8")
     try:
         data = self._level_manager._db.get(key)
     except KeyError:
         raise PlayerDoesNotExist(f"Player {player_id} doesn't exist")
     return nbt.load(data, compressed=False, little_endian=True)
Example 8
 def __init__(self, path: str):
     with open(path, "rb") as f:
         self._level_dat_version = struct.unpack("<i", f.read(4))[0]
         # TODO: handle other versions
         assert (
             4 <= self._level_dat_version <= 8
         ), f"Unknown level.dat version {self._level_dat_version}"
         data_length = struct.unpack("<i", f.read(4))[0]
         root_tag = nbt.load(buffer=f.read(data_length),
                             compressed=False,
                             little_endian=True)
     super().__init__(root_tag.value, root_tag.name)
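The header parsed above appears to be two little-endian 32-bit integers: a storage version followed by the byte length of the uncompressed little-endian NBT payload. A minimal sketch of that read with only the standard library (the file path is hypothetical):

import struct

with open("level.dat", "rb") as f:  # hypothetical path
    storage_version, payload_length = struct.unpack("<ii", f.read(8))
    payload = f.read(payload_length)  # uncompressed little-endian NBT follows

assert len(payload) == payload_length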
Example 9
 def _get_raw_player_data(self, player_id: str) -> nbt.NBTFile:
     if player_id == LOCAL_PLAYER:
         if "Player" in self.root_tag["Data"]:
             return self.root_tag["Data"]["Player"]
         else:
             raise PlayerDoesNotExist("Local player doesn't exist")
     else:
         path = os.path.join(self.path, "playerdata", f"{player_id}.dat")
         if os.path.exists(path):
             return nbt.load(path)
         raise PlayerDoesNotExist(f"Player {player_id} does not exist")
Example 10
 def _load_palette_blocks(
     self,
     data: bytes,
 ) -> Tuple[numpy.ndarray, List[amulet_nbt.NBTFile], bytes]:
     data, _, blocks = self._decode_packed_array(data)
     if blocks is not None:
         palette_len, data = struct.unpack("<I", data[:4])[0], data[4:]
         palette, offset = amulet_nbt.load(
             data,
             compressed=False,
             count=palette_len,
             offset=True,
             little_endian=True,
         )
         return blocks, palette, data[offset:]
     else:
         palette, offset = amulet_nbt.load(data,
                                           compressed=False,
                                           count=1,
                                           offset=True,
                                           little_endian=True)
         return numpy.zeros((16, 16, 16),
                            dtype=numpy.int16), palette, data[offset:]
Example 11
 def load_from(self, path: str):
     with open(path, "rb") as f:
         self._level_dat_version = struct.unpack("<i", f.read(4))[0]
         if 4 <= self._level_dat_version <= 9:
             data_length = struct.unpack("<i", f.read(4))[0]
             root_tag = nbt.load(f.read(data_length),
                                 compressed=False,
                                 little_endian=True)
             self.name = root_tag.name
             self.value = root_tag.value
         else:
             # TODO: handle other versions
             raise ObjectReadError(
                 f"Unsupported level.dat version {self._level_dat_version}")
Example 12
    def _init_read(self):
        """data to be read at init in read mode"""
        magic_num_1 = self._buffer.read(8)
        assert magic_num_1 == magic_num, "This file is not a construction file."
        self._format_version = struct.unpack(">B", self._buffer.read(1))[0]
        if self._format_version == 0:
            self._buffer.seek(-magic_num_len, os.SEEK_END)
            magic_num_2 = self._buffer.read(8)
            assert (
                magic_num_2 == magic_num
            ), "It looks like this file is corrupt. It probably wasn't saved properly"

            self._buffer.seek(-magic_num_len - INT_STRUCT.size, os.SEEK_END)
            metadata_end = self._buffer.tell()
            metadata_start = INT_STRUCT.unpack(self._buffer.read(INT_STRUCT.size))[0]
            self._buffer.seek(metadata_start)

            metadata = amulet_nbt.load(
                buffer=self._buffer.read(metadata_end - metadata_start),
                compressed=True,
            )

            try:
                self._source_edition = metadata["export_version"]["edition"].value
                self._source_version = tuple(
                    map(lambda v: v.value, metadata["export_version"]["version"])
                )
            except KeyError as e:
                raise AssertionError(
                    f'Missing export version identifying key "{e.args[0]}"'
                )

            self._section_version = metadata["section_version"].value

            self._palette = self._unpack_palette(metadata["block_palette"])

            self._selection_boxes = (
                metadata["selection_boxes"].value.reshape(-1, 6).tolist()
            )

            self._section_index_table = (
                metadata["section_index_table"].value.view(SECTION_ENTRY_TYPE).tolist()
            )

        else:
            raise Exception(
                f"This wrapper doesn't support any construction version higher than {max_format_version}"
            )
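A minimal sketch of the footer walk performed above, using only the standard library; the magic value and the big-endian offset struct are assumptions inferred from how magic_num and INT_STRUCT are used here, not confirmed constants:

import os
import struct

MAGIC = b"constrct"                  # assumed 8-byte magic number
OFFSET_STRUCT = struct.Struct(">I")  # assumed encoding of the metadata offset

with open("example.construction", "rb") as f:  # hypothetical file
    assert f.read(len(MAGIC)) == MAGIC, "not a construction file"
    f.seek(-len(MAGIC) - OFFSET_STRUCT.size, os.SEEK_END)
    metadata_end = f.tell()
    metadata_start = OFFSET_STRUCT.unpack(f.read(OFFSET_STRUCT.size))[0]
    assert f.read(len(MAGIC)) == MAGIC, "file is truncated or corrupt"
    f.seek(metadata_start)
    metadata_blob = f.read(metadata_end - metadata_start)  # gzip-compressed NBT metadata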
Example 13
def main():
    data_dir = os.path.join(os.path.dirname(__file__), "data")
    input_dir = os.path.join(data_dir, "big_endian_compressed_nbt")

    for path in glob.glob(os.path.join(input_dir, "*.nbt")):
        fname = os.path.splitext(os.path.basename(path))[0]
        nbt = amulet_nbt.load(path)
        nbt.save_to(
            os.path.join(data_dir, "big_endian_nbt", fname + ".nbt"),
            compressed=False,
            little_endian=False,
        )
        nbt.save_to(
            os.path.join(data_dir, "little_endian_nbt", fname + ".nbt"),
            compressed=False,
            little_endian=True,
        )
        with open(os.path.join(data_dir, "snbt", fname + ".snbt"),
                  "w",
                  encoding="utf-8") as f:
            f.write(nbt.to_snbt("    "))
Example 14
    def _shallow_load(self):
        if os.path.isfile(self.path):
            with open(self.path, "rb") as f:
                magic_num_1 = f.read(magic_num_len)
                if magic_num_1 == magic_num:
                    format_version = struct.unpack(">B", f.read(1))[0]
                    if format_version == 0:
                        f.seek(-magic_num_len, os.SEEK_END)
                        magic_num_2 = f.read(magic_num_len)
                        if magic_num_2 == magic_num:
                            f.seek(-magic_num_len - INT_STRUCT.size,
                                   os.SEEK_END)
                            metadata_end = f.tell()
                            metadata_start = INT_STRUCT.unpack(
                                f.read(INT_STRUCT.size))[0]
                            f.seek(metadata_start)

                            metadata = amulet_nbt.load(
                                f.read(metadata_end - metadata_start),
                                compressed=True,
                            )

                            self._platform = metadata["export_version"]["edition"].value
                            self._version = tuple(
                                map(
                                    lambda v: v.value,
                                    metadata["export_version"]["version"],
                                )
                            )

                            selection_boxes = (
                                metadata["selection_boxes"].value.reshape(-1, 6).tolist()
                            )
                            self._bounds[self.dimensions[0]] = SelectionGroup(
                                [
                                    SelectionBox((minx, miny, minz), (maxx, maxy, maxz))
                                    for minx, miny, minz, maxx, maxy, maxz in selection_boxes
                                ]
                            )
Example 15
    def open_from(self, f: BinaryIO):
        sponge_schem = amulet_nbt.load(f)
        version = sponge_schem.get("Version")
        if not isinstance(version, amulet_nbt.TAG_Int):
            raise SpongeSchemReadError(
                "Version key must exist and be an integer.")
        if version == 1:
            raise SpongeSchemReadError(
                "Sponge Schematic Version 1 is not supported currently.")
        elif version == 2:
            offset = sponge_schem.get("Offset")
            if isinstance(offset,
                          amulet_nbt.TAG_Int_Array) and len(offset) == 3:
                min_point = numpy.array(offset)
            else:
                min_point = numpy.array([0, 0, 0], dtype=numpy.int32)

            size = []
            for key in ("Width", "Height", "Length"):
                val = sponge_schem.get(key)
                if not isinstance(val, amulet_nbt.TAG_Short):
                    raise SpongeSchemReadError(
                        f"Key {key} must exist and be a TAG_Short.")
                # convert to an unsigned short
                val = val.value
                if val < 0:
                    val += 2**16
                size.append(val)

            max_point = min_point + size
            selection = SelectionBox(min_point, max_point)
            self._bounds[self.dimensions[0]] = SelectionGroup(selection)
            data_version = sponge_schem.get("DataVersion")
            if not isinstance(data_version, amulet_nbt.TAG_Int):
                raise SpongeSchemReadError("DataVersion must be a TAG_Int.")
            translator_version = self.translation_manager.get_version(
                "java", int(data_version))
            self._platform = translator_version.platform
            self._version = translator_version.data_version

            packed_block_data = sponge_schem.get("BlockData")
            if not isinstance(packed_block_data, amulet_nbt.TAG_Byte_Array):
                raise SpongeSchemReadError(
                    "BlockData must be a TAG_Byte_Array")

            unpacked_block_data = decode_byte_array(
                numpy.array(packed_block_data, dtype=numpy.uint8))
            if len(unpacked_block_data) != numpy.prod(size):
                raise SpongeSchemReadError(
                    "The data contained in BlockData does not match the size of the schematic."
                )
            dx, dy, dz = selection.shape
            blocks_array: numpy.ndarray = numpy.transpose(
                numpy.array(
                    unpacked_block_data,
                    dtype=numpy.uint32,
                ).reshape((dy, dz, dx)),
                (2, 0, 1),  # YZX => XYZ
            )

            if "Palette" not in sponge_schem:
                raise SpongeSchemReadError(
                    "Amulet is not able to read Sponge Schem files with no block palette."
                )

            palette_data = sponge_schem.get("Palette")
            if not isinstance(palette_data, amulet_nbt.TAG_Compound):
                raise SpongeSchemReadError("Palette must be a TAG_Compound.")

            block_palette: Dict[int, Block] = {}
            for blockstate, index in palette_data.items():
                if index.value in block_palette:
                    raise SpongeSchemReadError(
                        f"Duplicate block index {index} found in the palette.")
                block_palette[index.value] = Block.from_string_blockstate(
                    blockstate)

            if not numpy.all(numpy.isin(blocks_array, list(block_palette))):
                raise SpongeSchemReadError(
                    "Some values in BlockData were not present in Palette")

            for cx, cz in selection.chunk_locations():
                chunk_box = SelectionBox.create_chunk_box(
                    cx, cz).intersection(selection)
                array_slice = chunk_box.create_moved_box(selection.min,
                                                         subtract=True).slice
                chunk_blocks_: numpy.ndarray = blocks_array[array_slice]
                chunk_palette_indexes, chunk_blocks = numpy.unique(
                    chunk_blocks_,
                    return_inverse=True,
                )
                chunk_blocks = chunk_blocks.reshape(chunk_blocks_.shape)

                chunk_palette = numpy.empty(len(chunk_palette_indexes),
                                            dtype=object)
                for palette_index, index in enumerate(chunk_palette_indexes):
                    chunk_palette[palette_index] = block_palette[index]

                self._chunks[(cx, cz)] = (
                    chunk_box,
                    chunk_blocks,
                    chunk_palette,
                    [],
                    [],
                )

            if "BlockEntities" in sponge_schem:
                block_entities = sponge_schem["BlockEntities"]
                if (
                    not isinstance(block_entities, amulet_nbt.TAG_List)
                    or block_entities.list_data_type != 10  # amulet_nbt.TAG_Compound.tag_id
                ):
                    raise SpongeSchemReadError(
                        "BlockEntities must be a TAG_List of compound tags.")

                for block_entity in block_entities:
                    if "Pos" in block_entity:
                        pos = block_entity["Pos"]
                        if isinstance(pos, amulet_nbt.TAG_Int_Array) and len(pos) == 3:
                            pos = pos + min_point
                            x, y, z = (
                                pos[0],
                                pos[1],
                                pos[2],
                            )
                            block_entity["Pos"] = amulet_nbt.TAG_Int_Array(pos)
                            cx, cz = x >> 4, z >> 4
                            if (cx, cz) in self._chunks and (
                                    x, y, z) in self._chunks[(cx, cz)][0]:
                                self._chunks[(cx, cz)][3].append(block_entity)

            if "Entities" in sponge_schem:
                entities = sponge_schem["Entities"]
                if (
                    not isinstance(entities, amulet_nbt.TAG_List)
                    or entities.list_data_type != 10  # amulet_nbt.TAG_Compound.tag_id
                ):
                    raise SpongeSchemReadError(
                        "Entities must be a TAG_List of compound tags.")

                for entity in entities:
                    if "Pos" in entity:
                        pos = entity["Pos"]
                        if (
                            isinstance(pos, amulet_nbt.TAG_List)
                            and len(pos) == 3
                            and pos.list_data_type == 6  # amulet_nbt.TAG_Double.tag_id
                        ):
                            # shift by the schematic origin; min_point is defined
                            # even when the Offset tag is missing or malformed
                            x, y, z = (
                                pos[0].value + min_point[0],
                                pos[1].value + min_point[1],
                                pos[2].value + min_point[2],
                            )
                            entity["Pos"] = amulet_nbt.TAG_List([
                                amulet_nbt.TAG_Int(x),
                                amulet_nbt.TAG_Int(y),
                                amulet_nbt.TAG_Int(z),
                            ])
                            cx, cz = numpy.floor([x, z]).astype(int) >> 4
                            if (cx, cz) in self._chunks and (
                                    x, y, z) in self._chunks[(cx, cz)][0]:
                                self._chunks[(cx, cz)][4].append(entity)

        else:
            raise SpongeSchemReadError(
                f"Sponge Schematic Version {version.value} is not supported currently."
            )
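The val += 2 ** 16 branch above reinterprets the signed TAG_Short as the unsigned size the format intends. A tiny self-contained illustration:

def to_unsigned_short(val: int) -> int:
    """Reinterpret a signed 16-bit value as unsigned (0..65535)."""
    return val + 2 ** 16 if val < 0 else val


assert to_unsigned_short(-1) == 65535  # 0xFFFF stored in a signed short
assert to_unsigned_short(256) == 256   # positive values pass through unchanged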
Example 16
                str(self.data_type_func(self.value_field.GetValue())))

    def get_selected_tag_type(self):
        for rd_btn in self.radio_buttons:
            if rd_btn.GetValue():
                return rd_btn.nbt_tag_class
        return None

    def save(self, evt):
        self.save_callback(
            self.name_field.GetValue(),
            self.data_type_func(self.value_field.GetValue()),
            self.get_selected_tag_type(),
            self.old_name,
        )

        self.Close()


if __name__ == "__main__":
    import wx.lib.inspection

    app = wx.App()
    wx.lib.inspection.InspectionTool().Show()
    frame = wx.Frame(None)
    NBTEditor(frame,
              nbt.load(buffer=NBT_FILE),
              callback=lambda nbt_data: print(nbt_data))
    frame.Show()
    app.MainLoop()
Example 17
    def open_from(self, f: BinaryIO):
        f = BytesIO(f.read())
        magic_num_1 = f.read(8)
        assert magic_num_1 == magic_num, "This file is not a construction file."
        self._format_version = struct.unpack(">B", f.read(1))[0]
        if self._format_version == 0:
            f.seek(-magic_num_len, os.SEEK_END)
            magic_num_2 = f.read(8)
            assert (
                magic_num_2 == magic_num
            ), "It looks like this file is corrupt. It probably wasn't saved properly"

            f.seek(-magic_num_len - INT_STRUCT.size, os.SEEK_END)
            metadata_end = f.tell()
            metadata_start = INT_STRUCT.unpack(f.read(INT_STRUCT.size))[0]
            f.seek(metadata_start)

            metadata = amulet_nbt.load(
                f.read(metadata_end - metadata_start),
                compressed=True,
            )

            try:
                self._platform = metadata["export_version"]["edition"].value
                self._version = tuple(
                    map(lambda v: v.value, metadata["export_version"]["version"])
                )
            except KeyError as e:
                raise KeyError(f'Missing export version identifying key "{e.args[0]}"')

            self._section_version = metadata["section_version"].value

            palette = unpack_palette(metadata["block_palette"])

            selection_boxes = metadata["selection_boxes"].value.reshape(-1, 6).tolist()

            self._selection = SelectionGroup(
                [
                    SelectionBox((minx, miny, minz), (maxx, maxy, maxz))
                    for minx, miny, minz, maxx, maxy, maxz in selection_boxes
                ]
            )

            self._populate_chunk_to_box()

            section_index_table = (
                metadata["section_index_table"].value.view(SECTION_ENTRY_TYPE).tolist()
            )

            if self._section_version == 0:
                for (
                    start_x,
                    start_y,
                    start_z,
                    shape_x,
                    shape_y,
                    shape_z,
                    position,
                    length,
                ) in section_index_table:
                    f.seek(position)
                    nbt_obj = amulet_nbt.load(f.read(length))
                    if nbt_obj["blocks_array_type"].value == -1:
                        blocks = None
                        block_entities = None
                    else:
                        blocks = numpy.reshape(
                            nbt_obj["blocks"].value, (shape_x, shape_y, shape_z)
                        )
                        block_entities = parse_block_entities(nbt_obj["block_entities"])

                    start = numpy.array([start_x, start_y, start_z])
                    chunk_index: numpy.ndarray = start // self.sub_chunk_size
                    shape = numpy.array([shape_x, shape_y, shape_z])
                    if numpy.any(shape <= 0):
                        continue  # skip sections with zero size
                    if numpy.any(
                        start + shape > (chunk_index + 1) * self.sub_chunk_size
                    ):
                        log.error(
                            f"section in construction file did not fit in one sub-chunk. Start: {start}, Shape: {shape}"
                        )
                    cx, cy, cz = chunk_index.tolist()
                    self._chunk_to_section.setdefault((cx, cz), []).append(
                        ConstructionSection(
                            (start_x, start_y, start_z),
                            (shape_x, shape_y, shape_z),
                            blocks,
                            palette,
                            parse_entities(nbt_obj["entities"]),
                            block_entities,
                        )
                    )
            else:
                raise Exception(
                    f"This wrapper does not support any construction section version higher than {max_section_version}"
                )

        else:
            raise Exception(
                f"This wrapper does not support any construction format version higher than {max_format_version}"
            )
Example 18
 def open_from(self, f: BinaryIO):
     schematic = amulet_nbt.load(f)
     if any(key in schematic
            for key in ("Version", "Data Version", "BlockData")):
         raise ObjectReadError("This file is not a legacy schematic file.")
     materials = schematic.get("Materials", amulet_nbt.TAG_String()).value
     if materials == "Alpha":
         self._platform = "java"
         self._version = (1, 12, 2)
     elif materials == "Pocket":
         self._platform = "bedrock"
         self._version = (1, 12, 0)
     else:
         raise Exception(
             f'"{materials}" is not a supported platform for a schematic file.'
         )
     self._chunks = {}
     selection_box = SelectionBox(
         (0, 0, 0),
         (
             schematic["Width"].value,
             schematic["Height"].value,
             schematic["Length"].value,
         ),
     )
     self._selection = SelectionGroup(selection_box)
     entities: amulet_nbt.TAG_List = schematic.get("Entities",
                                                   amulet_nbt.TAG_List())
     block_entities: amulet_nbt.TAG_List = schematic.get(
         "TileEntities", amulet_nbt.TAG_List())
     blocks: numpy.ndarray = (
         schematic["Blocks"].value.astype(numpy.uint8).astype(numpy.uint16)
     )
     if "AddBlocks" in schematic:
         add_blocks = schematic["AddBlocks"]
         blocks = (
             blocks
             + (
                 numpy.concatenate([[(add_blocks & 0xF0) >> 4], [add_blocks & 0xF]])
                 .T.ravel()
                 .astype(numpy.uint16)
                 << 8
             )[: blocks.size]
         )
     max_point = selection_box.max
     temp_shape = (max_point[1], max_point[2], max_point[0])
     blocks = numpy.transpose(blocks.reshape(temp_shape),
                              (2, 0, 1))  # YZX => XYZ
     data = numpy.transpose(schematic["Data"].value.reshape(temp_shape),
                            (2, 0, 1)).astype(numpy.uint8)
     for cx, cz in selection_box.chunk_locations():
         box = SelectionBox(
             (cx * self.sub_chunk_size, 0, cz * self.sub_chunk_size),
             (
                 min((cx + 1) * self.sub_chunk_size, selection_box.size_x),
                 selection_box.size_y,
                 min((cz + 1) * self.sub_chunk_size, selection_box.size_z),
             ),
         )
         self._chunks[(cx, cz)] = (box, blocks[box.slice], data[box.slice],
                                   [], [])
     for e in block_entities:
         if all(key in e for key in ("x", "y", "z")):
             x, y, z = e["x"].value, e["y"].value, e["z"].value
             if (x, y, z) in selection_box:
                 cx = x >> 4
                 cz = z >> 4
                 self._chunks[(cx, cz)][3].append(e)
     for e in entities:
         if "Pos" in e:
             pos: PointCoordinates = tuple(
                 map(lambda t: float(t.value), e["Pos"].value))
             if pos in selection_box:
                 cx = int(pos[0]) >> 4
                 cz = int(pos[2]) >> 4
                 self._chunks[(cx, cz)][4].append(e)
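The AddBlocks handling above splits each byte into two 4-bit values that extend the 8-bit block IDs to 12 bits. A toy sketch of that interleaving with made-up bytes:

import numpy

add_blocks = numpy.array([0x12, 0x34], dtype=numpy.uint16)  # two bytes cover four blocks
high = (add_blocks & 0xF0) >> 4
low = add_blocks & 0x0F
extra = numpy.concatenate([[high], [low]]).T.ravel()
print(extra)       # [1 2 3 4]: the high nibble comes first in each pair
print(extra << 8)  # shifted into bits 8..11 before being added to the Blocks array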
Example 19
                str(self.data_type_func(self.value_field.GetValue())))

    def get_selected_tag_type(self):
        for rd_btn in self.radio_buttons:
            if rd_btn.GetValue():
                return rd_btn.nbt_tag_class
        return None

    def save(self, evt):
        self.save_callback(
            self.name_field.GetValue(),
            self.data_type_func(self.value_field.GetValue()),
            self.get_selected_tag_type(),
            self.old_name,
        )

        self.Close()


if __name__ == "__main__":
    import wx.lib.inspection

    app = wx.App()
    wx.lib.inspection.InspectionTool().Show()
    frame = wx.Frame(None)
    NBTEditor(frame,
              nbt.load(NBT_FILE),
              callback=lambda nbt_data: print(nbt_data))
    frame.Show()
    app.MainLoop()
Example 20
    def __init__(self, path_or_buffer: PathOrBuffer):
        if isinstance(path_or_buffer, str):
            assert path_or_buffer.endswith(
                ".schematic"), "File selected is not a .schematic file"
            assert os.path.isfile(
                path_or_buffer
            ), f"There is no schematic file at path {path_or_buffer}"
            schematic = amulet_nbt.load(path_or_buffer)
            assert not all(key in schematic for key in (
                "Version", "Data Version",
                "BlockData")), "This file is not a legacy schematic file."
        else:
            assert hasattr(path_or_buffer,
                           "read"), "Object does not have a read method"
            schematic = amulet_nbt.load(buffer=path_or_buffer)

        materials = schematic.get("Materials", amulet_nbt.TAG_String()).value
        if materials == "Alpha":
            self._platform = "java"
        elif materials == "Pocket":
            self._platform = "bedrock"
        else:
            raise Exception(
                f'"{materials}" is not a supported platform for a schematic file.'
            )
        self._chunks: Dict[
            ChunkCoordinates,
            Tuple[SelectionBox, BlockArrayType, BlockDataArrayType, list, list],
        ] = {}
        self._selection = SelectionBox(
            (0, 0, 0),
            (
                schematic["Width"].value,
                schematic["Height"].value,
                schematic["Length"].value,
            ),
        )
        entities: amulet_nbt.TAG_List = schematic.get("Entities",
                                                      amulet_nbt.TAG_List())
        block_entities: amulet_nbt.TAG_List = schematic.get(
            "TileEntities", amulet_nbt.TAG_List())
        blocks: numpy.ndarray = schematic["Blocks"].value.astype(
            numpy.uint8).astype(numpy.uint16)
        if "AddBlocks" in schematic:
            add_blocks = schematic["AddBlocks"]
            blocks = blocks + (numpy.concatenate([
                [(add_blocks & 0xF0) >> 4], [add_blocks & 0xF]
            ]).T.ravel().astype(numpy.uint16) << 8)[:blocks.size]
        max_point = self._selection.max
        temp_shape = (max_point[1], max_point[2], max_point[0])
        blocks = numpy.transpose(blocks.reshape(temp_shape),
                                 (2, 0, 1))  # YZX => XYZ
        data = numpy.transpose(schematic["Data"].value.reshape(temp_shape),
                               (2, 0, 1))
        for cx, cz in self._selection.chunk_locations():
            box = SelectionBox(
                (cx * 16, 0, cz * 16),
                (
                    min((cx + 1) * 16, self._selection.size_x),
                    self._selection.size_y,
                    min((cz + 1) * 16, self._selection.size_z),
                ),
            )
            self._chunks[(cx, cz)] = (box, blocks[box.slice], data[box.slice],
                                      [], [])
        for e in block_entities:
            if all(key in e for key in ("x", "y", "z")):
                x, y, z = e["x"].value, e["y"].value, e["z"].value
                if (x, y, z) in self._selection:
                    cx = x >> 4
                    cz = z >> 4
                    self._chunks[(cx, cz)][3].append(e)
        for e in entities:
            if "Pos" in e:
                pos: PointCoordinates = tuple(
                    map(lambda t: t.value, e["Pos"].value))
                if pos in self._selection:
                    cx = int(pos[0]) >> 4
                    cz = int(pos[2]) >> 4
                    self._chunks[(cx, cz)][4].append(e)
Example 21
def main():
  args = parser.parse_args()

  world_paths = get_world_paths()

  if args.world_name not in world_paths:
    print(f'Could not find world by the name of "{args.world_name}"')
    exit(1)

  world_path = world_paths[args.world_name]

  output_folder = world_path
  if args.behavior_pack:
    behavior_packs_path = os.path.join(world_path, 'behavior_packs')
    behavior_packs = [ file.path for file in os.scandir(behavior_packs_path) if file.is_dir() ]
    if not behavior_packs:
      print('Could not find behavior pack!')
      exit(1)
    else:
      output_folder = behavior_packs[0]

  db_path = os.path.join(world_path, 'db')
  db = LevelDB(db_path)

  structures: StructureDict = {}

  structure_id = args.structure_id
  if structure_id != 'all':
    if ':' not in structure_id: structure_id = 'mystructure:' + structure_id

  for key, data in db.iterate():
    try:
      key_str = key.decode('ascii')
      if key_str.startswith('structuretemplate_'):
        str_id = key_str[len('structuretemplate_'):]

        structure = amulet_nbt.load(buffer=data, little_endian=True)
        structures[str_id] = structure

        if (str_id == structure_id or structure_id == 'all') and args.delete:
          # print(f'Deleted structure "{str_id}" from the leveldb database')
          db.delete(key)
    except Exception:
      pass  # skip keys that are not ASCII or do not hold valid NBT

  db.close()

  filtered_structures = {}
  if structure_id != 'all':
    if structure_id not in structures:
      print(f'Could not find structure with the id of "{structure_id}"! Available ids: {", ".join(structures.keys())}')
      exit(0)
      
    filtered_structures = { key: value for key, value in structures.items() if key == structure_id }
  elif structure_id == 'all':
    filtered_structures = structures

  if len(filtered_structures) == 0:
    print(f'No structures found!')
    exit(0)
  else:
    print(f'Preparing to save {", ".join(filtered_structures.keys())}')
  
  save_structures(output_folder, filtered_structures, args.force)
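The loop above filters LevelDB keys by the structuretemplate_ prefix and normalises bare ids into the mystructure namespace. A small helper sketch of that key scheme (the helper name is hypothetical):

PREFIX = 'structuretemplate_'


def structure_key(structure_id: str) -> bytes:
  """Build the LevelDB key under which a structure template is stored."""
  if ':' not in structure_id:
    structure_id = 'mystructure:' + structure_id
  return (PREFIX + structure_id).encode('ascii')


assert structure_key('house') == b'structuretemplate_mystructure:house'
assert structure_key('foo:bar') == b'structuretemplate_foo:bar'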
Example 22
    def __init__(self, path_or_buffer: Union[str, IO]):
        if isinstance(path_or_buffer, str):
            assert path_or_buffer.endswith(
                ".mcstructure"), "File selected is not a .mcstructure file"
            assert os.path.isfile(
                path_or_buffer
            ), f"There is no mcstructure file at path {path_or_buffer}"
            mcstructure = amulet_nbt.load(path_or_buffer, little_endian=True)
        else:
            assert hasattr(path_or_buffer,
                           "read"), "Object does not have a read method"
            mcstructure = amulet_nbt.load(buffer=path_or_buffer,
                                          little_endian=True)

        self._chunks: Dict[
            ChunkCoordinates,
            Tuple[
                SelectionBox,
                numpy.ndarray,
                AnyNDArray,
                List[amulet_nbt.TAG_Compound],
                List[amulet_nbt.TAG_Compound],
            ],
        ] = {}
        if mcstructure["format_version"].value == 1:
            min_point = numpy.array(
                tuple(c.value for c in mcstructure["structure_world_origin"]))
            max_point = min_point + tuple(c.value for c in mcstructure["size"])
            self._selection = SelectionBox(min_point, max_point)
            blocks_array: numpy.ndarray = numpy.array(
                [[b.value for b in layer]
                 for layer in mcstructure["structure"]["block_indices"]],
                dtype=numpy.int32,
            ).reshape((len(mcstructure["structure"]["block_indices"]),
                       *self._selection.shape))

            palette_key = list(
                mcstructure["structure"]["block_palette"].keys())[
                    0]  # find a way to do this based on user input
            block_palette = list(mcstructure["structure"]["block_palette"]
                                 [palette_key]["block_palette"])

            for cx, cz in self._selection.chunk_locations():
                chunk_box = SelectionBox.create_chunk_box(cx, cz).intersection(
                    self._selection)
                array_slice = (slice(None), ) + chunk_box.create_moved_box(
                    self._selection.min, subtract=True).slice
                chunk_blocks_: numpy.ndarray = blocks_array[array_slice]
                chunk_palette_indexes, chunk_blocks = numpy.unique(
                    chunk_blocks_.reshape((chunk_blocks_.shape[0], -1)).T,
                    return_inverse=True,
                    axis=0,
                )
                chunk_blocks = chunk_blocks.reshape(chunk_blocks_.shape[1:])

                chunk_palette = numpy.empty(len(chunk_palette_indexes),
                                            dtype=object)
                for palette_index, indexes in enumerate(chunk_palette_indexes):
                    chunk_palette[palette_index] = tuple(block_palette[index]
                                                         for index in indexes
                                                         if index >= 0)

                self._chunks[(cx, cz)] = (
                    chunk_box,
                    chunk_blocks,
                    chunk_palette,
                    [],
                    [],
                )

            block_entities = {
                int(key): val["block_entity_data"]
                for key, val in mcstructure["structure"]["block_palette"]
                [palette_key]["block_position_data"].items()
                if "block_entity_data" in val
            }
            for location, block_entity in block_entities.items():
                if all(key in block_entity for key in "xyz"):
                    x, y, z = (
                        block_entity["x"].value,
                        block_entity["y"].value,
                        block_entity["z"].value,
                    )
                    cx, cz = x >> 4, z >> 4
                    if (cx, cz) in self._chunks and (x, y, z) in self._chunks[(
                            cx, cz)][0]:
                        self._chunks[(cx, cz)][3].append(block_entity)

            entities = list(mcstructure["structure"]["entities"])
            for entity in entities:
                if "Pos" in entity:
                    x, y, z = (
                        entity["Pos"][0].value,
                        entity["Pos"][1].value,
                        entity["Pos"][2].value,
                    )
                    cx, cz = numpy.floor([x, z]).astype(int) >> 4
                    if (cx, cz) in self._chunks and (x, y, z) in self._chunks[(
                            cx, cz)][0]:
                        self._chunks[(cx, cz)][4].append(entity)

        else:
            raise Exception(
                f"mcstructure file with format_version=={mcstructure['format_version'].value} cannot be read"
            )
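The per-chunk palette construction above collapses the block layers into one tuple per block column with numpy.unique along axis 0. A toy sketch of the same idea on a made-up two-layer array, where -1 marks an empty layer entry:

import numpy

layers = numpy.array(
    [
        [0, 1, 1, 0],    # layer 0 palette indices for four block positions
        [-1, 2, 2, -1],  # layer 1 palette indices (-1 = no block in this layer)
    ],
    dtype=numpy.int32,
)
combos, inverse = numpy.unique(layers.T, return_inverse=True, axis=0)
print(combos)   # the distinct (layer0, layer1) pairs: [[ 0 -1], [ 1  2]]
print(inverse)  # which pair each block position uses: [0 1 1 0]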
Example 23
    def open_from(self, f: BinaryIO):
        mcstructure = amulet_nbt.load(f, little_endian=True)
        if mcstructure["format_version"].value == 1:
            min_point = numpy.array(
                tuple(c.value for c in mcstructure["structure_world_origin"])
            )
            max_point = min_point + tuple(c.value for c in mcstructure["size"])
            selection = SelectionBox(min_point, max_point)
            self._selection = SelectionGroup(selection)
            translator_version = self.translation_manager.get_version(
                "bedrock", (999, 999, 999)
            )
            self._platform = translator_version.platform
            self._version = translator_version.version_number
            blocks_array: numpy.ndarray = numpy.array(
                [
                    [b.value for b in layer]
                    for layer in mcstructure["structure"]["block_indices"]
                ],
                dtype=numpy.int32,
            ).reshape(
                (len(mcstructure["structure"]["block_indices"]), *selection.shape)
            )

            palette_key = list(mcstructure["structure"]["palette"].keys())[
                0
            ]  # find a way to do this based on user input
            block_palette = list(
                mcstructure["structure"]["palette"][palette_key]["block_palette"]
            )

            for cx, cz in selection.chunk_locations():
                chunk_box = SelectionBox.create_chunk_box(cx, cz).intersection(
                    selection
                )
                array_slice = (slice(None),) + chunk_box.create_moved_box(
                    selection.min, subtract=True
                ).slice
                chunk_blocks_: numpy.ndarray = blocks_array[array_slice]
                chunk_palette_indexes, chunk_blocks = numpy.unique(
                    chunk_blocks_.reshape((chunk_blocks_.shape[0], -1)).T,
                    return_inverse=True,
                    axis=0,
                )
                chunk_blocks = chunk_blocks.reshape(chunk_blocks_.shape[1:])

                chunk_palette = numpy.empty(len(chunk_palette_indexes), dtype=object)
                for palette_index, indexes in enumerate(chunk_palette_indexes):
                    chunk_palette[palette_index] = tuple(
                        block_palette[index] for index in indexes if index >= 0
                    )

                self._chunks[(cx, cz)] = (
                    chunk_box,
                    chunk_blocks,
                    chunk_palette,
                    [],
                    [],
                )

            block_entities = {
                int(key): val["block_entity_data"]
                for key, val in mcstructure["structure"]["palette"][palette_key][
                    "block_position_data"
                ].items()
                if "block_entity_data" in val
            }
            for location, block_entity in block_entities.items():
                if all(key in block_entity for key in "xyz"):
                    x, y, z = (
                        block_entity["x"].value,
                        block_entity["y"].value,
                        block_entity["z"].value,
                    )
                    cx, cz = x >> 4, z >> 4
                    if (cx, cz) in self._chunks and (x, y, z) in self._chunks[(cx, cz)][
                        0
                    ]:
                        self._chunks[(cx, cz)][3].append(block_entity)

            entities = list(mcstructure["structure"]["entities"])
            for entity in entities:
                if "Pos" in entity:
                    x, y, z = (
                        entity["Pos"][0].value,
                        entity["Pos"][1].value,
                        entity["Pos"][2].value,
                    )
                    cx, cz = numpy.floor([x, z]).astype(int) >> 4
                    if (cx, cz) in self._chunks and (x, y, z) in self._chunks[(cx, cz)][
                        0
                    ]:
                        self._chunks[(cx, cz)][4].append(entity)

        else:
            raise Exception(
                f"mcstructure file with format_version=={mcstructure['format_version'].value} cannot be read"
            )
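Several of these examples map block coordinates to chunk coordinates with >> 4 and floor float entity positions first; the arithmetic shift floors toward negative infinity, so it matches floor division by 16 even for negative coordinates. A quick check:

import math

for x in (-17, -16, -1, 0, 15, 16, 31):
    assert x >> 4 == x // 16 == math.floor(x / 16)

# float positions are floored before shifting, e.g. an entity at x = -0.5 is in chunk -1
assert math.floor(-0.5) >> 4 == -1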