Beispiel #1
0
    def unpack_value(self, reader: BinaryReader) -> PropertyValueTyping:
        """Read and return this property type's value from `reader`.

        Scalars are read directly; `Bytes`/`String` are length-prefixed; array types are
        length-prefixed and possibly zlib-compressed.
        """
        if self in (self.Bytes, self.String):
            size = reader.unpack_value("i")
            raw = reader.read(size)
            if self != PropertyType.String:
                return raw  # bytes
            # Assumed encoding (could also be "ascii").
            return raw.decode("utf-8")  # str

        if "Array" not in self.name:
            # Plain scalar type.
            return reader.unpack_value(self.get_fmt())

        count, compressed, stored_size = reader.unpack("III")
        if not compressed:
            source = reader
        else:
            expected_size = self.get_size(array=True) * count
            inflated = zlib.decompressobj().decompress(reader.read(stored_size))
            if len(inflated) != expected_size:
                raise ValueError(
                    f"FBX property decompressed data size ({len(inflated)}) does not match expected size "
                    f"({expected_size})"
                )
            source = BinaryReader(inflated)
        return list(source.unpack(self.get_fmt(count)))
Beispiel #2
0
    def unpack(self, reader: BinaryReader, **kwargs):
        """Read the TPF header, set byte order from platform, and unpack all textures."""
        reader.unpack_value("4s", asserted=b"TPF\0")
        self.platform = TPFPlatform(reader.unpack_value("B", offset=0xC))
        # Console platforms (360/PS3) are big-endian; everything else is little-endian.
        is_big_endian = self.platform in {TPFPlatform.Xbox360, TPFPlatform.PS3}
        reader.byte_order = ">" if is_big_endian else "<"

        reader.unpack_value("i")  # data length
        file_count = reader.unpack_value("i")
        reader.unpack_value("B")  # platform
        self.tpf_flags = reader.unpack_value("B")
        if self.tpf_flags not in {0, 1, 2, 3}:
            raise ValueError(
                f"`TPF.tpf_flags` was {self.tpf_flags}, but expected 0, 1, 2, or 3."
            )
        self.encoding = reader.unpack_value("B")
        if self.encoding not in {0, 1, 2}:
            raise ValueError(
                f"`TPF.encoding` was {self.encoding}, but expected 0, 1, or 2."
            )
        reader.assert_pad(1)

        # Encoding value 1 means UTF-16 (byte order from reader); otherwise Shift-JIS.
        if self.encoding == 1:
            name_encoding = reader.get_utf_16_encoding()
        else:
            name_encoding = "shift_jis_2004"
        self.textures = []
        for _ in range(file_count):
            self.textures.append(
                TPFTexture.unpack_from(reader, self.platform, self.tpf_flags, name_encoding)
            )
Beispiel #3
0
 def unpack_from(cls, reader: BinaryReader):
     """Unpack a `FloatStruct` from `reader` and return it.

     Args:
         reader: binary reader positioned at the start of the struct.

     Raises:
         ValueError: if the float data length is negative or not a multiple of four.
     """
     float_struct = cls()
     float_struct.unk0 = reader.unpack_value("i")
     length = reader.unpack_value("i")
     if length < 0 or length % 4:
         raise ValueError(
             f"Unexpected `FloatStruct` length: {length}. Expected a multiple of 4 (or 0)."
         )
     float_struct.values = list(reader.unpack(f"{length // 4}f"))
     # BUG FIX: previously fell off the end and implicitly returned None, so callers
     # (e.g. `TPFTexture.unpack_from`, which stores this result) received `None`.
     return float_struct
Beispiel #4
0
 def unpack(self, reader: BinaryReader):
     """Read `GXItem`s until a terminator ID, then consume and validate the terminator block."""
     end_markers = {2 ** 31 - 1, -1}
     self.gx_items = []
     # Peek the next int without consuming it; stop at either terminator value.
     while reader.unpack_value("<i", offset=reader.position) not in end_markers:
         self.gx_items.append(GXItem(reader))
     self.terminator_id = reader.unpack_value("<i")  # either 2 ** 31 - 1 or -1
     reader.unpack_value("<i", asserted=100)
     self.terminator_null_count = reader.unpack_value("<i") - 12
     terminator_nulls = reader.read(self.terminator_null_count)
     if terminator_nulls.strip(b"\0"):
         raise ValueError(f"Found non-null data in terminator: {terminator_nulls}")
Beispiel #5
0
 def _check_big_endian_and_struct_64(gnl_reader: BinaryReader):
     """Guess byte order and offset width from the zero bytes in the first offset.

     Returns a `(big_endian, use_struct_64)` tuple and leaves the reader at offset 0.
     """
     gnl_reader.seek(0)
     # First two bytes of first offset should be zeroes if big-endian.
     is_big = gnl_reader.unpack_value("h") == 0
     if is_big:
         # Remainder of first half of first offset should be zeroes if 64-bit.
         is_64 = gnl_reader.unpack_value("h") == 0
     else:
         # Second half of first offset should be zeroes if 64-bit.
         gnl_reader.seek(4)
         is_64 = gnl_reader.unpack_value("i") == 0
     gnl_reader.seek(0)
     return is_big, is_64
Beispiel #6
0
    def __init__(self, data: bytes):
        reader = BinaryReader(data)

        reader.unpack_value("4s", asserted=b"DDS ")
        reader.unpack_value("i", asserted=0x7C)
        self.flags = reader.unpack_value("I")
        self.height = reader.unpack_value("i")
        self.width = reader.unpack_value("i")
        self.pitch_or_linear_size = reader.unpack_value("i")
        self.depth = reader.unpack_value("i")
        self.mipmap_count = reader.unpack_value("i")
        self.reserved_1 = reader.unpack("11i")

        # TODO: More here (see SoulsFormats excerpt below), but I care mainly about width/height right now.
        """
Beispiel #7
0
 def _check_use_struct_64(reader: BinaryReader, goal_count):
     """Detect whether this `LuaInfo` uses the 64-bit layout, based on goal entry spacing."""
     if goal_count == 0:
         raise LuaError(
             "Cannot detect `LuaInfo` version if no goals are present.")
     if goal_count >= 2:
         # With two or more goals, the field at 0x24 disambiguates directly.
         return reader.unpack_value("i", offset=0x24) == 0
     # Hacky check if there's only one goal.
     if reader.unpack_value("i", offset=0x18) == 0x28:
         return True
     if reader.unpack_value("i", offset=0x14) == 0x20:
         return False
     raise ValueError(
         "Found unexpected data while trying to detect `LuaInfo` version from single goal."
     )
Beispiel #8
0
 def read(cls, reader: BinaryReader, bit_big_endian: bool) -> BinderFlags:
     """Read a byte, reverse it if necessary, and return flags integer."""
     flags = cls(reader.unpack_value("B"))
     needs_reversal = not bit_big_endian and not (flags.is_big_endian and not flags.has_flag_7)
     if needs_reversal:
         reversed_bits = f"{flags:08b}"[::-1]
         flags = cls(int(reversed_bits, 2))
     return flags
Beispiel #9
0
def get_instruction_args(reader: BinaryReader, category, index,
                         first_arg_offset, event_args_size, emedf: dict):
    """Process instruction arguments (required and optional) from EMEVD binary.

    Args:
        reader: binary reader for the EMEVD file; its position is restored before returning.
        category: instruction category (class) ID.
        index: instruction index within its category.
        first_arg_offset: absolute offset of this instruction's packed argument data.
        event_args_size: total byte size of the packed argument data.
        emedf: EMEDF dictionary mapping `(category, index)` to instruction metadata.

    Returns:
        `(args_format, args_list)` tuple; any optional args are separated from the
        required args by `"|"` in the format string.

    Raises:
        KeyError: if the instruction or its argument types are missing from `emedf`.
        ValueError: if the packed size is inconsistent with the documented argument types,
            or optional args appear on an instruction that does not permit them.
    """
    try:
        emedf_args_info = emedf[category, index]["args"]
    except KeyError as ex:
        # Chain the original KeyError for easier debugging (PEP 3134).
        raise KeyError(
            f"Could not find instruction ({category}, {index}) in `Instruction.EMEDF`."
        ) from ex
    previous_offset = reader.position
    if event_args_size == 0:
        return "", []
    try:
        args_format = "@" + "".join(arg["internal_type"].get_fmt()
                                    for arg in emedf_args_info.values())
    except KeyError as ex:
        raise KeyError(
            f"Cannot find argument types for instruction {category}[{index:02d}] ({event_args_size} bytes)"
        ) from ex

    # 's' arguments are actually four-byte offsets into the packed string data, though we will keep the 's' symbol.
    struct_args_format = args_format.replace("s", "I")
    required_args_size = struct.calcsize(struct_args_format)
    if required_args_size > event_args_size:
        raise ValueError(
            f"Documented size of minimum required args for instruction {category}"
            f"[{index}] is {required_args_size}, but size of args specified in EMEVD file is "
            f"only {event_args_size}.")

    reader.seek(first_arg_offset)
    args = reader.unpack(struct_args_format)

    # Additional arguments may appear for the instruction 2000[00], 'RunEvent'. These instructions are tightly packed
    # and are always aligned to 4. We read them here as unsigned integers and must actually parse the called event ID to
    # interpret them properly (done at `EMEVD` class level).

    extra_size = event_args_size - required_args_size

    opt_arg_count = extra_size // 4
    if opt_arg_count == 0:
        reader.seek(previous_offset)
        return args_format[1:], list(args)
    elif (category, index) not in _OPTIONAL_ARGS_ALLOWED:
        raise ValueError(
            f"Extra arguments found for instruction {category}[{index}], which is not permitted. Arg types may be "
            f"wrong (too short) for this instruction.\n"
            f"    required size = {required_args_size}\n"
            f"    actual size = {event_args_size}")
    elif extra_size % 4 != 0:
        raise ValueError(
            f"Error interpreting instruction {category}[{index}]: optional argument "
            f"size is not a multiple of four bytes ({extra_size}).")

    opt_args = [reader.unpack_value("<I") for _ in range(opt_arg_count)]
    reader.seek(previous_offset)
    # `opt_arg_count` already equals `extra_size // 4`; no need to recompute.
    return args_format[1:] + "|" + "I" * opt_arg_count, list(args) + opt_args
Beispiel #10
0
 def _check_big_endian(reader: BinaryReader):
     """Return True (big-endian) or False (little-endian) from the marker int at offset 4."""
     with reader.temp_offset(4):
         marker = reader.unpack_value("i")
     if marker == 0x1000000:
         return True
     if marker == 0x1:
         return False
     raise ValueError(
         f"Invalid marker for LuaInfo byte order: {hex(marker)}")
Beispiel #11
0
 def from_bnd3_reader(cls, reader: BinaryReader, binder_flags: BinderFlags, bit_big_endian: bool):
     """Build a BND3 entry header from `reader`, honoring the layout options in `binder_flags`."""
     flags = BinderEntryFlags.read(reader, bit_big_endian)
     reader.assert_pad(3)
     compressed_size = reader.unpack_value("i")
     offset_fmt = "q" if binder_flags.has_long_offsets else "I"
     data_offset = reader.unpack_value(offset_fmt)
     entry_id = None
     if binder_flags.has_ids:
         entry_id = reader.unpack_value("i")
     path = None
     if binder_flags.has_names:
         path_offset = reader.unpack_value("i")
         path = reader.unpack_string(path_offset, encoding="shift-jis")  # NOT `shift_jis_2004`
     uncompressed_size = None
     if binder_flags.has_compression:
         uncompressed_size = reader.unpack_value("i")
     return cls(
         flags=flags,
         compressed_size=compressed_size,
         entry_id=entry_id,
         path=path,
         uncompressed_size=uncompressed_size,
         data_offset=data_offset,
     )
Beispiel #12
0
 def read(self, reader: BinaryReader, bit_count: int, fmt: str):
     """Read `bit_count` bits from the current bit field, refilling from `reader` as needed.

     The field is cached as a reversed binary string so bits can be consumed left-to-right
     across calls. Any partially-consumed field is discarded when `fmt` changes or the
     request would overrun the current field.
     """
     max_bit_count = 8 * struct.calcsize(fmt)
     if self._field == "" or fmt != self._fmt or self._offset + bit_count > max_bit_count:
         # Consume (and reverse) new bit field. Any previous bit field is discarded.
         integer = reader.unpack_value(fmt)
         self._field = format(integer, f"0{max_bit_count}b")[::-1]
         self._fmt = fmt
     # Slice out the requested bits, then reverse back to normal bit order for parsing.
     binary_str = self._field[self._offset:self._offset + bit_count][::-1]
     self._offset += bit_count
     if self._offset % max_bit_count == 0:  # read new field next time
         self._field = ""
         self._offset = 0
     return int(binary_str, 2)
Beispiel #13
0
    def unpack(cls, reader: BinaryReader, property_type_byte: int = None):
        """Unpack `FBXProperty` from given `reader`.

        If `property_type_byte=None` (default), it will be read from the first byte of `reader` first. Otherwise, that
        byte will be assumed as already read.
        """
        type_byte = property_type_byte if property_type_byte is not None else reader.unpack_value("<B")
        try:
            prop_type = PropertyType(type_byte)  # type: PropertyType
        except ValueError:
            raise ValueError(
                f"Unsupported property type read: {type_byte} ({chr(type_byte)})"
            )
        return cls(prop_type.unpack_value(reader), prop_type)
Beispiel #14
0
def get_instruction_args(reader: BinaryReader, category, index,
                         first_arg_offset, event_args_size, format_dict):
    """Process instruction arguments (required and optional) from EMEVD binary."""

    start_offset = reader.position
    if event_args_size == 0:
        return "", []
    try:
        args_format = "@" + format_dict[category][index]
    except KeyError:
        raise KeyError(
            f"Cannot find argument types for instruction {category}[{index:02d}]."
        )

    # 's' arguments are actually four-byte offsets into the packed string data, though we will keep the 's' symbol.
    struct_format = args_format.replace("s", "I")
    required_size = struct.calcsize(struct_format)
    if required_size > event_args_size:
        raise ValueError(
            f"Documented size of minimum required args for instruction {category}"
            f"[{index}] is {required_size}, but size of args specified in EMEVD file is "
            f"only {event_args_size}.")

    reader.seek(first_arg_offset)
    required_args = reader.unpack(struct_format)

    # Extra unsigned-int arguments may follow for instruction 2000[00] ('RunEvent'); they are
    # tightly packed, aligned to 4, and interpreted later at `EMEVD` class level using the
    # called event ID.
    extra_size = event_args_size - required_size
    extra_count = extra_size // 4
    if extra_count == 0:
        reader.seek(start_offset)
        return args_format[1:], list(required_args)
    if extra_size % 4 != 0:
        raise ValueError(
            f"Error interpreting instruction {category}[{index}]: optional argument "
            f"size is not a multiple of four bytes ({extra_size}).")

    extra_args = [reader.unpack_value("<I") for _ in range(extra_count)]
    reader.seek(start_offset)
    return args_format[1:] + "|" + "I" * extra_count, list(required_args) + extra_args
Beispiel #15
0
    def detect(cls, reader: BinaryReader) -> DCXType:
        """Detect type of DCX. Resets offset when done."""
        old_offset = reader.tell()

        # Default when no known pattern matches.
        dcx_type = cls.Unknown

        magic = reader.unpack_value("4s")
        if magic == b"DCP\0":  # rare, only for older games and DeS test maps
            # Possible file pattern for DFLT or EDGE compression.
            dcx_fmt = reader.unpack_value("4s", offset=4)
            if dcx_fmt == b"DCP\0":
                dcx_type = cls.DCP_DFLT
            elif dcx_fmt == b"EDGE":
                dcx_type = cls.DCP_EDGE
        elif magic == b"DCX\0":
            # Compression format name is stored at offset 0x28 in DCX files.
            dcx_fmt = reader.unpack_value("4s", offset=0x28)
            if dcx_fmt == b"EDGE":
                dcx_type = cls.DCX_EDGE
            elif dcx_fmt == b"DFLT":
                # Check four unknown header fields to determine DFLT subtype.
                unk04 = reader.unpack_value("i", offset=0x4)
                unk10 = reader.unpack_value("i", offset=0x10)
                unk30 = reader.unpack_value("i", offset=0x30)
                unk38 = reader.unpack_value("B", offset=0x38)
                if unk10 == 0x24:
                    dcx_type = cls.DCX_DFLT_10000_24_9
                elif unk10 == 0x44:
                    if unk04 == 0x10000:
                        dcx_type = cls.DCX_DFLT_10000_44_9
                    elif unk04 == 0x11000:
                        if unk30 == 0x8000000:
                            dcx_type = cls.DCX_DFLT_11000_44_8
                        elif unk30 == 0x9000000:
                            if unk38 == 15:
                                dcx_type = cls.DCX_DFLT_11000_44_9_15
                            elif unk38 == 0:
                                dcx_type = cls.DCX_DFLT_11000_44_9
            elif dcx_fmt == b"KRAK":  # requires `oo2core_6_win64.dll`
                dcx_type = cls.DCX_KRAK
        else:
            # No DCX/DCP magic: 0x78 first byte suggests a raw zlib stream
            # (second byte is one of the standard zlib FLG values).
            b0 = reader.unpack_value("B", offset=0)
            b1 = reader.unpack_value("B", offset=1)
            if b0 == 0x78 and (b1 in {0x01, 0x5E, 0x9C, 0xDA}):
                dcx_type = cls.Zlib

        reader.seek(old_offset)
        return dcx_type
Beispiel #16
0
 def unpack_header(self, reader: BinaryReader) -> int:
     """Read the BND3 header, set reader byte order from its flags, and return the entry count."""
     # Endian bytes are read ahead at fixed offsets (consumed later via `seek(2, 1)` below).
     self.big_endian = reader.unpack_value("?", offset=0xD)
     reader.byte_order = ">" if self.big_endian else "<"
     self.bit_big_endian = reader.unpack_value("?", offset=0xE)
     reader.unpack_value("4s", asserted=b"BND3")
     self.signature = reader.unpack_value("8s").decode("ascii").rstrip("\0")
     self.flags = BinderFlags.read(reader, self.bit_big_endian)
     # Flags can force big-endian even when the header's endian byte did not.
     reader.byte_order = ">" if self.big_endian or self.flags.is_big_endian else "<"
     reader.seek(2, 1)  # skip peeked endian bytes
     reader.assert_pad(1)
     entry_count = reader.unpack_value("i")
     reader.seek(12, 1)  # skip file size
     return entry_count
Beispiel #17
0
    def unpack(self, msb_reader: BinaryReader):
        """Unpack this MSB region from `msb_reader`; returns the absolute entity ID field offset."""
        region_offset = msb_reader.position
        base_data = msb_reader.unpack_struct(self.REGION_STRUCT)
        # All offsets in `base_data` are relative to the region's own start.
        self.name = msb_reader.unpack_string(
            offset=region_offset + base_data["name_offset"],
            encoding=self.NAME_ENCODING,
        )
        self._region_index = base_data["__region_index"]
        self.translate = Vector3(base_data["translate"])
        self.rotate = Vector3(base_data["rotate"])
        # These two offsets are only validated (expected to point to null data), not stored.
        self.check_null_field(msb_reader,
                              region_offset + base_data["unknown_offset_1"])
        self.check_null_field(msb_reader,
                              region_offset + base_data["unknown_offset_2"])

        # A zero offset means this region subtype has no extra type data.
        if base_data["type_data_offset"] != 0:
            msb_reader.seek(region_offset + base_data["type_data_offset"])
            self.unpack_type_data(msb_reader)

        msb_reader.seek(region_offset + base_data["entity_id_offset"])
        self.entity_id = msb_reader.unpack_value("i")

        return region_offset + base_data["entity_id_offset"]
Beispiel #18
0
 def from_bnd4_reader(cls, reader: BinaryReader, binder_flags: BinderFlags, bit_big_endian: bool, unicode: bool):
     """Build a BND4 entry header from `reader`, honoring the layout options in `binder_flags`.

     Args:
         reader: binary reader positioned at the entry header.
         binder_flags: binder-level flags controlling which fields are present.
         bit_big_endian: passed through to `BinderEntryFlags.read`.
         unicode: if True, entry paths are UTF-16; otherwise Shift-JIS.

     Raises:
         ValueError: if the fixed -1 marker after the entry flags is missing.
     """
     flags = BinderEntryFlags.read(reader, bit_big_endian)
     reader.assert_pad(3)
     # BUG FIX: explicit check instead of `assert`, which is silently stripped under `python -O`.
     minus_one = reader.unpack_value("i")
     if minus_one != -1:
         raise ValueError(f"Expected -1 after BND4 entry flags, but read {minus_one}.")
     compressed_size = reader.unpack_value("q")
     uncompressed_size = reader.unpack_value("q") if binder_flags.has_compression else None
     data_offset = reader.unpack_value("q" if binder_flags.has_long_offsets else "I")
     entry_id = reader.unpack_value("i") if binder_flags.has_ids else None
     if binder_flags.has_names:
         path_offset = reader.unpack_value("I")
         path = reader.unpack_string(path_offset, encoding="utf-16-le" if unicode else "shift-jis")
     else:
         path = None
     return cls(
         flags=flags,
         compressed_size=compressed_size,
         entry_id=entry_id,
         path=path,
         uncompressed_size=uncompressed_size,
         data_offset=data_offset,
     )
Beispiel #19
0
    def unpack_header(self, reader: BinaryReader):
        """Read the BND4 header and all entry headers; returns the list of entry headers."""
        reader.unpack_value("4s", asserted=b"BND4")
        self.unknown1 = reader.unpack_value("?")
        self.unknown2 = reader.unpack_value("?")
        reader.assert_pad(3)
        self.big_endian = reader.unpack_value("?")
        self.bit_big_endian = not reader.unpack_value("?")  # note reversal
        reader.assert_pad(1)

        reader.byte_order = ">" if self.big_endian else "<"  # no need to check flags for an override in BND4

        entry_count = reader.unpack_value("i")
        reader.unpack_value("q", asserted=0x40)  # header size
        self.signature = reader.unpack_value("8s").decode("ascii").rstrip("\0")
        entry_header_size = reader.unpack_value("q")
        data_offset = reader.unpack_value(
            "q")  # end of all headers, including hash table
        self.unicode = reader.unpack_value("?")
        self.flags = BinderFlags.read(reader, self.bit_big_endian)
        self.hash_table_type = reader.unpack_value("B")
        reader.assert_pad(5)
        hash_table_offset = reader.unpack_value("q")

        # Sanity check: entry header size implied by the flags must agree with the header.
        flags_header_size = self.flags.get_bnd_entry_header_size()
        if entry_header_size != flags_header_size:
            raise ValueError(
                f"Expected BND entry header size {flags_header_size} based on flags\n"
                f"{self.flags:08b}, but BND header says {entry_header_size}.")
        # Hash table offset should be zero unless hash table type is 4 (warn, don't fail).
        if self.hash_table_type != 4 and hash_table_offset != 0:
            _LOGGER.warning(
                f"Found non-zero hash table offset {hash_table_offset}, but header says this BHD has no hash "
                f"table.")

        entry_headers = [
            BinderEntryHeader.from_bnd4_reader(reader, self.flags,
                                               self.bit_big_endian,
                                               self.unicode)
            for _ in range(entry_count)
        ]

        if self.hash_table_type == 4:
            # Save the initial hash table.
            reader.seek(hash_table_offset)
            self._most_recent_hash_table = reader.read(data_offset -
                                                       hash_table_offset)

        return entry_headers
Beispiel #20
0
 def _is_dcx(reader: BinaryReader) -> bool:
     """Checks if file data starts with DCX (or DCP) magic."""
     magic = reader.unpack_value("4s", offset=0)
     return magic == b"DCP\0" or magic == b"DCX\0"
Beispiel #21
0
    def unpack_from(
        cls,
        reader: BinaryReader,
        platform: TPFPlatform,
        tpf_flags: int,
        encoding: str,
        tpf_path: tp.Union[None, str, Path] = None,
    ):
        """Unpack a `TPFTexture` from `reader`.

        Args:
            reader: binary reader positioned at the texture entry.
            platform: TPF platform, which determines the console-specific header layout.
            tpf_flags: TPF-level flags (affects PS3 header layout).
            encoding: encoding for the texture name string.
            tpf_path: optional path of the containing TPF file, stored for reference.

        Raises:
            ValueError: if any header field has an unexpected value.
        """
        self = cls()
        self.tpf_path = tpf_path

        file_offset = reader.unpack_value("I")
        file_size = reader.unpack_value("i")

        self.format = reader.unpack_value("B")
        self.texture_type = TextureType(reader.unpack_value("B"))
        self.mipmaps = reader.unpack_value("B")
        self.texture_flags = reader.unpack_value("B")
        if self.texture_flags not in {0, 1, 2, 3}:
            raise ValueError(
                f"`TPFTexture.flags1` was {self.texture_flags}, but expected 0, 1, 2, or 3."
            )

        if platform != TPFPlatform.PC:
            # BUG FIX: instantiate the header instead of assigning the class itself, which
            # made all textures share class-level header fields. (TODO: confirm
            # `TextureHeader` default-constructs without arguments.)
            self.header = TextureHeader()
            self.header.width = reader.unpack_value("h")
            self.header.height = reader.unpack_value("h")
            if platform == TPFPlatform.Xbox360:
                reader.assert_pad(4)
            elif platform == TPFPlatform.PS3:
                self.header.unk1 = reader.unpack_value("i")
                if tpf_flags != 0:
                    self.header.unk2 = reader.unpack_value("i")
                    if self.header.unk2 not in {0, 0x68E0, 0xAAE4}:
                        raise ValueError(
                            f"`TextureHeader.unk2` was {self.header.unk2}, but expected 0, 0x68E0, or 0xAAE4."
                        )
            elif platform in {TPFPlatform.PS4, TPFPlatform.XboxOne}:
                self.header.texture_count = reader.unpack_value("i")
                if self.header.texture_count not in {1, 6}:
                    # BUG FIX: this message was a bare (no-op) f-string; now actually raised.
                    raise ValueError(
                        f"`TextureHeader.texture_count` was {self.header.texture_count}, but expected 1 or 6."
                    )
                self.header.unk2 = reader.unpack_value("i")
                if self.header.unk2 != 0xD:
                    # BUG FIX: this message was a bare (no-op) f-string; now actually raised.
                    raise ValueError(
                        f"`TextureHeader.unk2` was {self.header.unk2}, but expected 0xD."
                    )

        name_offset = reader.unpack_value("I")
        has_float_struct = reader.unpack_value("i") == 1
        if platform in {TPFPlatform.PS4, TPFPlatform.XboxOne}:
            self.header.dxgi_format = reader.unpack_value("i")
        if has_float_struct:
            self.float_struct = FloatStruct.unpack_from(reader)

        # Texture data lives elsewhere in the file; jump there temporarily.
        with reader.temp_offset(file_offset):
            self.data = reader.read(file_size)
        if self.texture_flags in {2, 3}:
            # Data is DCX-compressed.
            # TODO: should enforce DCX type as 'DCP_EDGE'?
            self.data = decompress(self.data)

        self.name = reader.unpack_string(offset=name_offset, encoding=encoding)

        return self
Beispiel #22
0
    def unpack_header(self, reader: BinaryReader):
        """Read the BND4 header fields.

        Returns a `(entry_count, entry_header_size, hash_table_offset, data_offset)` tuple.
        """
        reader.unpack_value("4s", asserted=b"BND4")
        self.unknown1 = reader.unpack_value("?")
        self.unknown2 = reader.unpack_value("?")
        reader.assert_pad(3)
        self.big_endian = reader.unpack_value("?")
        self.bit_big_endian = not reader.unpack_value("?")  # note reversal
        reader.assert_pad(1)

        reader.byte_order = ">" if self.big_endian else "<"  # no need to check flags for an override in BND4

        entry_count = reader.unpack_value("i")
        reader.unpack_value("q", asserted=0x40)  # header size
        self.signature = reader.unpack_value("8s").decode("ascii").rstrip("\0")
        entry_header_size = reader.unpack_value("q")
        data_offset = reader.unpack_value(
            "q")  # end of all headers, including hash table
        self.unicode = reader.unpack_value("?")
        self.flags = BinderFlags.read(reader, self.bit_big_endian)
        self.hash_table_type = reader.unpack_value("B")
        reader.assert_pad(5)
        hash_table_offset = reader.unpack_value("q")

        return entry_count, entry_header_size, hash_table_offset, data_offset
Beispiel #23
0
    def unpack(self, reader: BinaryReader, **kwargs):
        """Unpack this param's header, row pointers, and row data/names from `reader`."""
        # Byte at offset 44 is 255 if and only if the file is big-endian.
        self.byte_order = reader.byte_order = ">" if reader.unpack_value(
            "B", offset=44) == 255 else "<"
        version_info = reader.unpack("bbb", offset=45)
        self.flags1 = ParamFlags1(version_info[0])
        self.flags2 = ParamFlags2(version_info[1])
        self.paramdef_format_version = version_info[2]
        header_struct = self.GET_HEADER_STRUCT(self.flags1, self.byte_order)
        header = reader.unpack_struct(header_struct)
        try:
            self.param_type = header["param_type"]
        except KeyError:
            # Header variant stores the param type string at an offset instead of inline.
            self.param_type = reader.unpack_string(
                offset=header["param_type_offset"], encoding="utf-8")
        self.paramdef_data_version = header["paramdef_data_version"]
        self.unknown = header["unknown"]
        # Row data offset in header not used. (It's an unsigned short, yet doesn't limit row count to 5461.)
        name_data_offset = header[
            "name_data_offset"]  # CANNOT BE TRUSTED IN VANILLA FILES! Off by +12 bytes.

        # Load row pointer data.
        row_struct = self.ROW_STRUCT_64 if self.flags1.LongDataOffset else self.ROW_STRUCT_32
        row_pointers = reader.unpack_structs(row_struct,
                                             count=header["row_count"])
        row_data_offset = reader.position  # Reliable row data offset.

        # Row size is lazily determined. TODO: Unpack row data in sequence and associate with names separately.
        if len(row_pointers) == 0:
            return
        elif len(row_pointers) == 1:
            # NOTE: The only vanilla param in Dark Souls with one row is LEVELSYNC_PARAM_ST (Remastered only),
            # for which the row size is hard-coded here. Otherwise, we can trust the repacked offset from Soulstruct
            # (and SoulsFormats, etc.).
            if self.param_type == "LEVELSYNC_PARAM_ST":
                row_size = 220
            else:
                row_size = name_data_offset - row_data_offset
        else:
            # With two or more rows, row size is the spacing between adjacent row data offsets.
            row_size = row_pointers[1]["data_offset"] - row_pointers[0][
                "data_offset"]

        # Note that we no longer need to track reader offset.
        name_encoding = self.get_name_encoding()
        for row_struct in row_pointers:
            reader.seek(row_struct["data_offset"])
            row_data = reader.read(row_size)
            if row_struct["name_offset"] != 0:
                try:
                    name = reader.unpack_string(
                        offset=row_struct["name_offset"],
                        encoding=name_encoding,
                        reset_old_offset=False,  # no need to reset
                    )
                except UnicodeDecodeError as ex:
                    # Some known vanilla row names cannot be decoded; keep those as raw bytes.
                    if ex.object in self.undecodable_row_names:
                        name = reader.unpack_bytes(
                            offset=row_struct["name_offset"],
                            reset_old_offset=False,  # no need to reset
                        )
                    else:
                        raise
                except ValueError:
                    # Log a hex dump of the name data before re-raising, to aid debugging.
                    reader.seek(row_struct["name_offset"])
                    _LOGGER.error(
                        f"Error encountered while parsing row name string in {self.param_type}.\n"
                        f"    Header: {header}\n"
                        f"    Row Struct: {row_struct}\n"
                        f"    30 chrs of name data: {' '.join(f'{{:02x}}'.format(x) for x in reader.read(30))}"
                    )
                    raise
            else:
                name = ""
            self.rows[row_struct["id"]] = ParamRow(row_data,
                                                   self.paramdef,
                                                   name=name)
Beispiel #24
0
    def read(self, reader: BinaryReader, layout: BufferLayout,
             uv_factor: float):
        self.uvs = []
        self.tangents = []
        self.colors = []

        with reader.temp_offset(reader.position):
            self.raw = reader.read(layout.get_total_size())

        for member in layout:

            not_implemented = False

            if member.semantic == LayoutSemantic.Position:
                if member.layout_type == LayoutType.Float3:
                    self.position = Vector3(reader.unpack("<3f"))
                elif member.layout_type == LayoutType.Float4:
                    self.position = Vector3(reader.unpack("<3f"))[:3]
                elif member.layout_type == LayoutType.EdgeCompressed:
                    raise NotImplementedError(
                        "Soulstruct cannot load FLVERs with edge-compressed vertex positions."
                    )
                else:
                    not_implemented = True
            elif member.semantic == LayoutSemantic.BoneWeights:
                if member.layout_type == LayoutType.Byte4A:
                    self.bone_weights = VertexBoneWeights(
                        *[w / 127.0 for w in reader.unpack("<4b")])
                elif member.layout_type == LayoutType.Byte4C:
                    self.bone_weights = VertexBoneWeights(
                        *[w / 255.0 for w in reader.unpack("<4B")])
                elif member.layout_type in {
                        LayoutType.UVPair, LayoutType.Short4ToFloat4A
                }:
                    # NOTE(review): fragment of a per-member decode loop; the
                    # enclosing `for`/`if member.semantic == BoneWeights`
                    # headers lie above this excerpt.
                    # Four signed shorts normalized from [-32767, 32767] to
                    # [-1.0, 1.0].
                    self.bone_weights = VertexBoneWeights(
                        *[w / 32767.0 for w in reader.unpack("<4h")])
                else:
                    not_implemented = True
            elif member.semantic == LayoutSemantic.BoneIndices:
                if member.layout_type in {
                        LayoutType.Byte4B, LayoutType.Byte4E
                }:
                    # Four unsigned bytes used directly as bone indices.
                    self.bone_indices = VertexBoneIndices(
                        *reader.unpack("<4B"))
                elif member.layout_type == LayoutType.ShortBoneIndices:
                    # Four signed shorts used directly as bone indices.
                    self.bone_indices = VertexBoneIndices(
                        *reader.unpack("<4h"))
                else:
                    not_implemented = True
            elif member.semantic == LayoutSemantic.Normal:
                if member.layout_type == LayoutType.Float3:
                    # Three raw floats; no `normal_w` for this layout.
                    # NOTE(review): the unpacked tuple is passed to `Vector3`
                    # un-starred here but starred elsewhere — presumably the
                    # constructor accepts both forms; confirm.
                    self.normal = Vector3(reader.unpack("<3f"))
                elif member.layout_type == LayoutType.Float4:
                    self.normal = Vector3(reader.unpack("<3f"))
                    # Fourth float carries `normal_w`; it must be integral
                    # and is stored as an `int`.
                    float_normal_w = reader.unpack_value("<f")
                    self.normal_w = int(float_normal_w)
                    if self.normal_w != float_normal_w:
                        raise ValueError(
                            f"`normal_w` float was not a whole number.")
                elif member.layout_type in {
                        LayoutType.Byte4A, LayoutType.Byte4B,
                        LayoutType.Byte4C, LayoutType.Byte4E
                }:
                    # Unsigned bytes mapped to approx. [-1.0, 1.0] via
                    # (x - 127) / 127; fourth byte is `normal_w`.
                    self.normal = Vector3([(x - 127) / 127.0
                                           for x in reader.unpack("<3B")])
                    self.normal_w = reader.unpack_value("<B")
                elif member.layout_type == LayoutType.Short2toFloat2:
                    # `normal_w` byte is stored FIRST for this layout, then
                    # three signed bytes normalized by / 127.
                    self.normal_w = reader.unpack_value("<B")
                    self.normal = Vector3(
                        [x / 127.0 for x in reader.unpack("<3b")])
                elif member.layout_type == LayoutType.Short4ToFloat4A:
                    # Three signed shorts normalized by / 32767, then a
                    # signed short `normal_w`.
                    self.normal = Vector3(
                        [x / 32767.0 for x in reader.unpack("<3h")])
                    self.normal_w = reader.unpack_value("<h")
                elif member.layout_type == LayoutType.Short4ToFloat4B:
                    # Three UNSIGNED shorts mapped to approx. [-1.0, 1.0]
                    # via (x - 32767) / 32767, then a signed short `normal_w`.
                    self.normal = Vector3([(x - 32767) / 32767.0
                                           for x in reader.unpack("<3H")])
                    self.normal_w = reader.unpack_value("<h")
                else:
                    not_implemented = True
            elif member.semantic == LayoutSemantic.UV:
                # NOTE(review): `uv_factor` is defined outside this excerpt —
                # presumably a version-dependent quantization divisor for
                # short-packed UVs; confirm at the caller.
                if member.layout_type == LayoutType.Float2:
                    self.uvs.append(Vector3(*reader.unpack("<2f"), 0.0))
                elif member.layout_type == LayoutType.Float3:
                    self.uvs.append(Vector3(*reader.unpack("<3f")))
                elif member.layout_type == LayoutType.Float4:
                    # Four floats hold TWO 2D UVs.
                    self.uvs.append(Vector3(*reader.unpack("<2f"), 0.0))
                    self.uvs.append(Vector3(*reader.unpack("<2f"), 0.0))
                elif member.layout_type in {
                        LayoutType.Byte4A, LayoutType.Byte4B,
                        LayoutType.Short2toFloat2, LayoutType.Byte4C,
                        LayoutType.UV
                }:
                    # Two signed shorts, scaled down by `uv_factor`.
                    self.uvs.append(
                        Vector3(*reader.unpack("<2h"), 0) / uv_factor)
                elif member.layout_type == LayoutType.UVPair:
                    # Two consecutive short-packed 2D UVs.
                    self.uvs.append(
                        Vector3(*reader.unpack("<2h"), 0) / uv_factor)
                    self.uvs.append(
                        Vector3(*reader.unpack("<2h"), 0) / uv_factor)
                elif member.layout_type == LayoutType.Short4ToFloat4B:
                    # Three shorts (3D UV) followed by a short that must be
                    # zero (consistency check on the stream).
                    self.uvs.append(Vector3(*reader.unpack("<3h")) / uv_factor)
                    if reader.unpack_value("<h") != 0:
                        raise ValueError(
                            "Expected zero short after reading UV | Short4ToFloat4B vertex member."
                        )
                else:
                    not_implemented = True
            elif member.semantic == LayoutSemantic.Tangent:
                if member.layout_type == LayoutType.Float4:
                    # Four raw floats.
                    self.tangents.append(Vector4(*reader.unpack("<4f")))
                elif member.layout_type in {
                        LayoutType.Byte4A,
                        LayoutType.Byte4B,
                        LayoutType.Byte4C,
                        LayoutType.Short4ToFloat4A,
                        LayoutType.Byte4E,
                }:
                    # Unsigned bytes mapped to approx. [-1.0, 1.0] via
                    # (x - 127) / 127 (same mapping as byte-packed normals).
                    tangent = Vector4([(x - 127) / 127.0
                                       for x in reader.unpack("<4B")])
                    self.tangents.append(tangent)
                else:
                    not_implemented = True
            elif member.semantic == LayoutSemantic.Bitangent:
                if member.layout_type in {
                        LayoutType.Byte4A, LayoutType.Byte4B,
                        LayoutType.Byte4C, LayoutType.Byte4E
                }:
                    # Same (x - 127) / 127 byte mapping as tangents; a single
                    # bitangent is stored (assigned, not appended).
                    self.bitangent = Vector4([(x - 127) / 127.0
                                              for x in reader.unpack("<4B")])
                else:
                    not_implemented = True
            elif member.semantic == LayoutSemantic.VertexColor:
                if member.layout_type == LayoutType.Float4:
                    # Four raw float channels (RGBA).
                    self.colors.append(ColorRGBA(*reader.unpack("<4f")))
                elif member.layout_type in {
                        LayoutType.Byte4A, LayoutType.Byte4C
                }:
                    # Convert byte channels [0-255] to float channels [0-1].
                    self.colors.append(
                        ColorRGBA(*[b / 255.0 for b in reader.unpack("<4B")]))
                else:
                    not_implemented = True
            else:
                not_implemented = True

            # Any semantic/layout-type combination not handled above is a
            # hard error rather than silently skipped bytes, since the reader
            # offset would otherwise desynchronize from the vertex stream.
            if not_implemented:
                raise NotImplementedError(
                    f"Unsupported vertex member semantic/type combination: "
                    f"{member.semantic.name} | {member.layout_type.name}")