示例#1
0
    def unpack(self, esd_reader: BinaryReader, **kwargs):
        """Read an ESD binary: state machines, optional ESD name, and trailing bytes.

        `esd_reader` should be positioned at the external header. Everything after that
        header uses offsets relative to its end, so the remainder of the stream is
        re-wrapped in a fresh `BinaryReader` before any internal offsets are followed.
        """

        header = esd_reader.unpack_struct(self.EXTERNAL_HEADER_STRUCT)
        # Internal offsets start here, so we reset the buffer.
        esd_reader = BinaryReader(esd_reader.read())

        internal_header = esd_reader.unpack_struct(self.INTERNAL_HEADER_STRUCT)
        self.magic = internal_header["magic"]
        # One header per state machine; the count comes from the *external* header.
        state_machine_headers = esd_reader.unpack_structs(
            self.STATE_MACHINE_HEADER_STRUCT,
            count=header["state_machine_count"])

        for state_machine_header in state_machine_headers:
            # Each machine's states are read from its own table offset.
            states = self.State.unpack(
                esd_reader,
                state_machine_header["state_machine_offset"],
                count=state_machine_header["state_count"],
            )
            self.state_machines[
                state_machine_header["state_machine_index"]] = states

        if internal_header["esd_name_length"] > 0:
            esd_name_offset = internal_header["esd_name_offset"]
            esd_name_length = internal_header["esd_name_length"]
            # Note the given length is the length of the final string. The actual UTF-16 encoded bytes are twice that.
            self.esd_name = esd_reader.unpack_string(offset=esd_name_offset,
                                                     length=2 *
                                                     esd_name_length,
                                                     encoding="utf-16le")
            # Preserve everything after the name verbatim for repacking.
            esd_reader.seek(esd_name_offset + 2 * esd_name_length)
            self.file_tail = esd_reader.read()
        else:
            self.esd_name = ""
            esd_reader.seek(header["unk_offset_1"])  # after packed EZL
            self.file_tail = esd_reader.read()
示例#2
0
    def unpack(self, msb_reader: BinaryReader):
        """Unpack one MSB part entry: header, transform, groups, strings, and typed data.

        All offsets stored in the part header are relative to the start of this entry.

        Raises:
            ValueError: if the packed part type does not match this class's subtype.
        """
        part_offset = msb_reader.position

        header = msb_reader.unpack_struct(self.PART_HEADER_STRUCT)
        if header["__part_type"] != self.ENTRY_SUBTYPE:
            # BUG FIX: the message previously interpolated `header['part_type']`, a key
            # that does not exist in the unpacked header, so triggering this branch
            # raised `KeyError` instead of the intended `ValueError`.
            raise ValueError(
                f"Unexpected part type enum {header['__part_type']} for class {self.__class__.__name__}."
            )
        self._instance_index = header["_instance_index"]
        self._model_index = header["_model_index"]
        self._part_type_index = header["_part_type_index"]
        for transform in ("translate", "rotate", "scale"):
            setattr(self, transform, Vector3(header[transform]))
        # Draw/display/backread groups are packed as eight 32-bit integers each.
        self._draw_groups = int_group_to_bit_set(header["__draw_groups"], assert_size=8)
        self._display_groups = int_group_to_bit_set(header["__display_groups"], assert_size=8)
        self._backread_groups = int_group_to_bit_set(header["__backread_groups"], assert_size=8)
        self.description = msb_reader.unpack_string(
            offset=part_offset + header["__description_offset"], encoding="utf-16-le",
        )
        self.name = msb_reader.unpack_string(
            offset=part_offset + header["__name_offset"], encoding="utf-16-le",
        )
        self.sib_path = msb_reader.unpack_string(
            offset=part_offset + header["__sib_path_offset"], encoding="utf-16-le",
        )

        msb_reader.seek(part_offset + header["__base_data_offset"])
        base_data = msb_reader.unpack_struct(self.PART_BASE_DATA_STRUCT)
        self.set(**base_data)

        msb_reader.seek(part_offset + header["__type_data_offset"])
        self.unpack_type_data(msb_reader)

        self._unpack_gparam_data(msb_reader, part_offset, header)
        self._unpack_scene_gparam_data(msb_reader, part_offset, header)
示例#3
0
    def unpack(self, reader: BinaryReader, **kwargs):
        """Unpack a BND4 binder: header, entry headers, entry data, and hash table."""
        entry_count, entry_header_size, hash_table_offset, data_offset = self.unpack_header(
            reader)

        expected_size = self.flags.get_bnd_entry_header_size()
        if entry_header_size != expected_size:
            raise ValueError(
                f"Expected BND entry header size {expected_size} based on flags\n"
                f"{self.flags:08b}, but BND header says {entry_header_size}.")
        if self.hash_table_type != 4 and hash_table_offset != 0:
            _LOGGER.warning(
                f"Found non-zero hash table offset {hash_table_offset}, but header says this BND has no hash "
                f"table.")

        # Entry headers are contiguous; read them all before reading any entry data.
        entry_headers = []
        for _ in range(entry_count):
            entry_headers.append(
                BinderEntryHeader.from_bnd4_reader(
                    reader, self.flags, self.bit_big_endian, self.unicode))
        for entry_header in entry_headers:
            self.add_entry(BinderEntry.from_header(reader, entry_header))

        if self.hash_table_type == 4:
            # Save the initial hash table.
            reader.seek(hash_table_offset)
            self._most_recent_hash_table = reader.read(data_offset - hash_table_offset)
        self._most_recent_entry_count = len(self._entries)
        self._most_recent_paths = [entry.path for entry in self._entries]
示例#4
0
    def unpack_event_dict(
        cls,
        reader: BinaryReader,
        instruction_table_offset,
        base_arg_data_offset,
        event_arg_table_offset,
        event_layers_table_offset,
        count=1,
    ) -> dict[int, Event]:
        """Unpack `count` events and return a dict mapping event ID to event instance."""
        headers = reader.unpack_structs(cls.HEADER_STRUCT, count=count)
        events = {}

        for header in headers:
            # Instructions for this event start at its recorded table offset.
            reader.seek(instruction_table_offset + header["first_instruction_offset"])
            instructions = cls.Instruction.unpack(
                reader,
                base_arg_data_offset,
                event_layers_table_offset,
                count=header["instruction_count"],
            )

            reader.seek(event_arg_table_offset + header["first_event_arg_offset"])
            replacements = cls.EventArg.unpack(reader, count=header["event_arg_count"])

            # Attach each event arg replacement to the instruction line it modifies.
            for replacement in replacements:
                instructions[replacement.line].event_args.append(replacement)

            events[header["event_id"]] = cls(
                header["event_id"], header["restart_type"], instructions)

        return events
示例#5
0
    def unpack(self, msb_reader: BinaryReader, **kwargs):
        """Unpack an MSB from the given reader."""
        if self.HEADER:
            # Skip (and ignore) the constant header, if this game version has one.
            msb_reader.seek(msb_reader.position + len(self.HEADER))

        # The four entry lists always appear in this order.
        self.models = self.MODEL_LIST_CLASS(msb_reader)
        self.events = self.EVENT_LIST_CLASS(msb_reader)
        self.regions = self.REGION_LIST_CLASS(msb_reader)
        self.parts = self.PART_LIST_CLASS(msb_reader)

        # Resolve inter-entry references by (deduplicated) entry name.
        model_names = self.models.set_and_get_unique_names()
        region_names = self.regions.set_and_get_unique_names()
        part_names = self.parts.set_and_get_unique_names()
        environment_names = self.events.get_entry_names(MSBEventSubtype.Environment)
        collision_names = self.parts.get_entry_names(MSBPartSubtype.Collision)

        self.events.set_names(region_names=region_names, part_names=part_names)
        self.parts.set_names(
            model_names=model_names,
            environment_names=environment_names,
            region_names=region_names,
            part_names=part_names,
            collision_names=collision_names,
        )
示例#6
0
 def unpack_header(self, reader: BinaryReader) -> int:
     """Parse a BND3 header in place and return the entry count.

     Peeks the two endianness flag bytes (at 0xD and 0xE) before the sequential read,
     sets `reader.byte_order` from them, then lets the flags override it if needed.
     """
     self.big_endian = reader.unpack_value("?", offset=0xD)
     reader.byte_order = ">" if self.big_endian else "<"
     self.bit_big_endian = reader.unpack_value("?", offset=0xE)
     reader.unpack_value("4s", asserted=b"BND3")
     self.signature = reader.unpack_value("8s").decode("ascii").rstrip("\0")
     self.flags = BinderFlags.read(reader, self.bit_big_endian)
     # Flags may force big-endian even when the header byte said otherwise.
     reader.byte_order = ">" if self.big_endian or self.flags.is_big_endian else "<"
     reader.seek(2, 1)  # skip peeked endian bytes
     reader.assert_pad(1)
     entry_count = reader.unpack_value("i")
     reader.seek(12, 1)  # skip file size
     return entry_count
示例#7
0
    def unpack(cls, reader: BinaryReader, event_layers_offset):
        """Unpack an event layer bit field as <a, b, c, ...> where a, b, c, ... are the
        little-endian zero-based indices of the enabled bits.

        e.g. field 01001...110 would be {1, 4, 29, 30}.
        """
        reader.seek(event_layers_offset)
        header = reader.unpack_struct(cls.HEADER_STRUCT)
        bit_field = header["event_layers"]
        # Collect the index of every set bit in the 32-bit field.
        enabled_layers = [bit for bit in range(32) if bit_field & (1 << bit)]
        return cls(enabled_layers)
示例#8
0
 def unpack(self, msb_reader: BinaryReader):
     """Unpack one MSB event entry: header, base data, name, and subtype data."""
     event_offset = msb_reader.position
     header = msb_reader.unpack_struct(self.EVENT_HEADER_STRUCT)
     if header["__event_type"] != self.ENTRY_SUBTYPE:
         raise ValueError(f"Unexpected MSB event type value {header['__event_type']} for {self.__class__.__name__}.")
     # All header offsets are relative to the start of this entry.
     msb_reader.seek(event_offset + header["__base_data_offset"])
     base_data = msb_reader.unpack_struct(self.EVENT_BASE_DATA_STRUCT)
     name_offset = event_offset + header["__name_offset"]
     self.name = msb_reader.unpack_string(offset=name_offset, encoding=self.NAME_ENCODING)
     # `set` copies known fields from the unpacked dicts onto this instance.
     self.set(**header)
     self.set(**base_data)
     msb_reader.seek(event_offset + header["__type_data_offset"])
     self.unpack_type_data(msb_reader)
示例#9
0
    def unpack(cls, esd_reader: BinaryReader, state_machine_offset,
               count) -> dict[int, State]:
        """Unpack multiple states from the same state table.

        Returns a dictionary of states, because it's always possible (if yet unseen) that state indices are not
        contiguous. State 0 is not repeated, as it generally is in the packed table.
        """
        esd_reader.seek(state_machine_offset)
        state_rows = esd_reader.unpack_structs(cls.STRUCT, count=count)

        states = {}
        for row in state_rows:
            conditions = cls.Condition.unpack(
                esd_reader,
                row["condition_pointers_offset"],
                count=row["condition_pointers_count"],
            )

            # The three command tables (enter/exit/ongoing) share the same layout.
            enter_commands, exit_commands, ongoing_commands = (
                cls.Command.unpack(
                    esd_reader,
                    row[f"{kind}_commands_offset"],
                    count=row[f"{kind}_commands_count"],
                )
                for kind in ("enter", "exit", "ongoing")
            )

            # State 0 will be overwritten when repeated at the end of the table, rather than added.
            states[row["index"]] = cls(
                row["index"],
                conditions,
                enter_commands,
                exit_commands,
                ongoing_commands,
            )

        return states
示例#10
0
    def detect(cls, reader: BinaryReader) -> DCXType:
        """Detect type of DCX. Resets offset when done."""
        start_offset = reader.tell()
        detected = cls.Unknown

        magic = reader.unpack_value("4s")
        if magic == b"DCP\0":  # rare, only for older games and DeS test maps
            # Possible file pattern for DFLT or EDGE compression.
            fmt = reader.unpack_value("4s", offset=4)
            if fmt == b"DCP\0":
                detected = cls.DCP_DFLT
            elif fmt == b"EDGE":
                detected = cls.DCP_EDGE
        elif magic == b"DCX\0":
            fmt = reader.unpack_value("4s", offset=0x28)
            if fmt == b"EDGE":
                detected = cls.DCX_EDGE
            elif fmt == b"DFLT":
                # Four unknown header fields distinguish the DFLT subtypes.
                unk04 = reader.unpack_value("i", offset=0x4)
                unk10 = reader.unpack_value("i", offset=0x10)
                unk30 = reader.unpack_value("i", offset=0x30)
                unk38 = reader.unpack_value("B", offset=0x38)
                if unk10 == 0x24:
                    detected = cls.DCX_DFLT_10000_24_9
                elif unk10 == 0x44:
                    if unk04 == 0x10000:
                        detected = cls.DCX_DFLT_10000_44_9
                    elif unk04 == 0x11000:
                        if unk30 == 0x8000000:
                            detected = cls.DCX_DFLT_11000_44_8
                        elif unk30 == 0x9000000:
                            if unk38 == 15:
                                detected = cls.DCX_DFLT_11000_44_9_15
                            elif unk38 == 0:
                                detected = cls.DCX_DFLT_11000_44_9
            elif fmt == b"KRAK":  # requires `oo2core_6_win64.dll`
                detected = cls.DCX_KRAK
        else:
            first_byte = reader.unpack_value("B", offset=0)
            second_byte = reader.unpack_value("B", offset=1)
            # 0x78 followed by one of these bytes is a raw zlib stream header.
            if first_byte == 0x78 and second_byte in {0x01, 0x5E, 0x9C, 0xDA}:
                detected = cls.Zlib

        reader.seek(start_offset)
        return detected
示例#11
0
File: bxf.py  Project: LugeBox/soulstruct
    def unpack_header(self, reader: BinaryReader):
        """Parse a BND4/BHD header and return the list of entry headers.

        Also stashes the raw hash table bytes (hash table type 4 only) so the file can
        be repacked faithfully.
        """
        reader.unpack_value("4s", asserted=b"BND4")
        self.unknown1 = reader.unpack_value("?")
        self.unknown2 = reader.unpack_value("?")
        reader.assert_pad(3)
        self.big_endian = reader.unpack_value("?")
        self.bit_big_endian = not reader.unpack_value("?")  # note reversal
        reader.assert_pad(1)

        reader.byte_order = ">" if self.big_endian else "<"  # no need to check flags for an override in BND4

        entry_count = reader.unpack_value("i")
        reader.unpack_value("q", asserted=0x40)  # header size
        self.signature = reader.unpack_value("8s").decode("ascii").rstrip("\0")
        entry_header_size = reader.unpack_value("q")
        data_offset = reader.unpack_value(
            "q")  # end of all headers, including hash table
        self.unicode = reader.unpack_value("?")
        self.flags = BinderFlags.read(reader, self.bit_big_endian)
        self.hash_table_type = reader.unpack_value("B")
        reader.assert_pad(5)
        hash_table_offset = reader.unpack_value("q")

        # Cross-check the header's entry header size against the size implied by flags.
        flags_header_size = self.flags.get_bnd_entry_header_size()
        if entry_header_size != flags_header_size:
            raise ValueError(
                f"Expected BND entry header size {flags_header_size} based on flags\n"
                f"{self.flags:08b}, but BND header says {entry_header_size}.")
        if self.hash_table_type != 4 and hash_table_offset != 0:
            _LOGGER.warning(
                f"Found non-zero hash table offset {hash_table_offset}, but header says this BHD has no hash "
                f"table.")

        entry_headers = [
            BinderEntryHeader.from_bnd4_reader(reader, self.flags,
                                               self.bit_big_endian,
                                               self.unicode)
            for _ in range(entry_count)
        ]

        if self.hash_table_type == 4:
            # Save the initial hash table.
            reader.seek(hash_table_offset)
            self._most_recent_hash_table = reader.read(data_offset -
                                                       hash_table_offset)

        return entry_headers
示例#12
0
    def unpack(cls,
               reader: BinaryReader,
               base_arg_data_offset,
               event_layers_table_offset,
               count=1):
        """Unpack some number of Instructions into a list, starting from the current file offset."""

        instructions = []
        struct_dicts = reader.unpack_structs(cls.HEADER_STRUCT, count=count)
        for i, d in enumerate(struct_dicts):

            # Process arguments.
            try:
                args_format, args_list = get_instruction_args(
                    reader,
                    d["category"],
                    d["index"],
                    base_arg_data_offset + d["first_base_arg_offset"],
                    d["base_args_size"],
                    cls.INSTRUCTION_ARG_TYPES,
                )
            except KeyError:
                # Unknown instruction: log its raw argument bytes before re-raising so
                # the EMEVD file can be debugged.
                # NOTE(review): this sizes the args from the *next* header (`i + 1`);
                # if the failing instruction is the last one this raises IndexError
                # instead — confirm whether that case can occur.
                args_size = struct_dicts[
                    i +
                    1]["first_base_arg_offset"] - d["first_base_arg_offset"]
                reader.seek(base_arg_data_offset + d["first_base_arg_offset"])
                raw_data = reader.read(args_size)
                _LOGGER.error(
                    f"Error while processing instruction arguments. Raw arg data: {raw_data}"
                )
                raise

            # Process event layers.
            if d["first_event_layers_offset"] > 0:
                event_layers = cls.EventLayers.unpack(
                    reader,
                    event_layers_table_offset + d["first_event_layers_offset"])
            else:
                event_layers = None

            instructions.append(
                cls(d["category"], d["index"], args_format, args_list,
                    event_layers))

        return instructions
示例#13
0
    def unpack(cls, esd_reader: BinaryReader, commands_offset, count=1):
        """Return a list of Command instances unpacked from `commands_offset`.

        An offset of -1 means this state has no commands of this kind.
        """
        if commands_offset == -1:
            return []
        rows = esd_reader.unpack_structs(cls.STRUCT, count=count, offset=commands_offset)

        commands = []
        for row in rows:
            args = []
            if row["args_offset"] > 0:
                esd_reader.seek(row["args_offset"])
                arg_rows = cls.ARG_STRUCT.unpack_count(esd_reader, count=row["args_count"])
                # Each argument is a packed EZL byte string read from its own offset.
                args = [
                    esd_reader.unpack_bytes(offset=arg_row["arg_ezl_offset"], length=arg_row["arg_ezl_size"])
                    for arg_row in arg_rows
                ]
            commands.append(cls(row["bank"], row["index"], args))

        return commands
示例#14
0
    def unpack(self, msb_reader: BinaryReader):
        """Unpack an MSB entry list, then leave the reader at the next list's offset."""
        header = msb_reader.unpack_struct(self.MAP_ENTITY_LIST_HEADER)
        # 'entry_offset_count' includes the tail offset, hence the -1.
        entry_offsets = []
        for _ in range(header["entry_offset_count"] - 1):
            entry_offsets.append(
                msb_reader.unpack_struct(self.MAP_ENTITY_ENTRY_OFFSET)["entry_offset"])
        tail = msb_reader.unpack_struct(self.MAP_ENTITY_LIST_TAIL)
        next_entry_list_offset = tail["next_entry_list_offset"]
        self.name = msb_reader.unpack_string(offset=header["name_offset"],
                                             encoding=self.NAME_ENCODING)

        self._entries = []
        for entry_offset in entry_offsets:
            msb_reader.seek(entry_offset)
            self._entries.append(self.ENTRY_CLASS(msb_reader))

        msb_reader.seek(next_entry_list_offset)
示例#15
0
    def unpack(self, emevd_reader: BinaryReader, **kwargs):
        """Unpack an EMEVD file: events, packed strings, and linked file offsets."""
        header = emevd_reader.unpack_struct(self.HEADER_STRUCT)

        emevd_reader.seek(header["event_table_offset"])
        event_dict = self.Event.unpack_event_dict(
            emevd_reader,
            header["instruction_table_offset"],
            header["base_arg_data_offset"],
            header["event_arg_table_offset"],
            header["event_layers_table_offset"],
            count=header["event_count"],
        )

        self.events.update(event_dict)

        # Raw packed string data is kept as-is for later lookup by string offset.
        if header["packed_strings_size"] != 0:
            emevd_reader.seek(header["packed_strings_offset"])
            self.packed_strings = emevd_reader.read(
                header["packed_strings_size"])

        if header["linked_files_count"] != 0:
            emevd_reader.seek(header["linked_files_table_offset"])
            # These are relative offsets into the packed string data.
            for _ in range(header["linked_files_count"]):
                self.linked_file_offsets.append(
                    struct.unpack("<Q", emevd_reader.read(8))[0])

        # Parse event args for `RunEvent` and `RunCommonEvent` instructions.
        for event in self.events.values():
            event.update_evs_function_args()
        for event in self.events.values():
            event.update_run_event_instructions()
示例#16
0
def get_instruction_args(reader: BinaryReader, category, index,
                         first_arg_offset, event_args_size, emedf: dict):
    """Process instruction arguments (required and optional) from EMEVD binary.

    Returns `(args_format, args_list)`: a `struct`-style format string (keeping 's' for
    string-offset args, with '|' separating required from optional args) and the
    unpacked values. The reader's position is restored before returning.

    Raises:
        KeyError: if `(category, index)` or its argument types are missing from `emedf`.
        ValueError: if the packed size contradicts the documented argument sizes, or
            optional args appear on an instruction that does not permit them.
    """

    try:
        emedf_args_info = emedf[category, index]["args"]
    except KeyError:
        raise KeyError(
            f"Could not find instruction ({category}, {index}) in `Instruction.EMEDF`."
        )
    previous_offset = reader.position
    if event_args_size == 0:
        return "", []
    try:
        args_format = "@" + "".join(arg["internal_type"].get_fmt()
                                    for arg in emedf_args_info.values())
    except KeyError:
        raise KeyError(
            f"Cannot find argument types for instruction {category}[{index:02d}] ({event_args_size} bytes)"
        )

    # 's' arguments are actually four-byte offsets into the packed string data, though we will keep the 's' symbol.
    struct_args_format = args_format.replace("s", "I")
    required_args_size = struct.calcsize(struct_args_format)
    if required_args_size > event_args_size:
        raise ValueError(
            f"Documented size of minimum required args for instruction {category}"
            f"[{index}] is {required_args_size}, but size of args specified in EMEVD file is "
            f"only {event_args_size}.")

    reader.seek(first_arg_offset)
    args = reader.unpack(struct_args_format)

    # Additional arguments may appear for the instruction 2000[00], 'RunEvent'. These instructions are tightly packed
    # and are always aligned to 4. We read them here as unsigned integers and must actually parse the called event ID to
    # interpret them properly (done at `EMEVD` class level).

    extra_size = event_args_size - required_args_size

    opt_arg_count = extra_size // 4
    if opt_arg_count == 0:
        # NOTE(review): 1-3 leftover bytes fall through here and are silently ignored
        # (treated as padding) — confirm this is intended.
        reader.seek(previous_offset)
        return args_format[1:], list(args)
    elif (category, index) not in _OPTIONAL_ARGS_ALLOWED:
        raise ValueError(
            f"Extra arguments found for instruction {category}[{index}], which is not permitted. Arg types may be "
            f"wrong (too short) for this instruction.\n"
            f"    required size = {required_args_size}\n"
            f"    actual size = {event_args_size}")
    elif extra_size % 4 != 0:
        raise ValueError(
            f"Error interpreting instruction {category}[{index}]: optional argument "
            f"size is not a multiple of four bytes ({extra_size}).")

    opt_args = [reader.unpack_value("<I") for _ in range(opt_arg_count)]
    reader.seek(previous_offset)
    return args_format[1:] + "|" + "I" * (extra_size //
                                          4), list(args) + opt_args
示例#17
0
    def unpack(self, msb_reader: BinaryReader):
        """Unpack one MSB region entry; returns the offset of its entity ID field."""
        region_offset = msb_reader.position
        base_data = msb_reader.unpack_struct(self.REGION_STRUCT)
        self.name = msb_reader.unpack_string(
            offset=region_offset + base_data["name_offset"],
            encoding=self.NAME_ENCODING,
        )
        self._region_index = base_data["__region_index"]
        self.translate = Vector3(base_data["translate"])
        self.rotate = Vector3(base_data["rotate"])
        # Two unknown data blocks that are expected to be zeroed.
        for unknown_key in ("unknown_offset_1", "unknown_offset_2"):
            self.check_null_field(msb_reader, region_offset + base_data[unknown_key])

        if base_data["type_data_offset"] != 0:
            msb_reader.seek(region_offset + base_data["type_data_offset"])
            self.unpack_type_data(msb_reader)

        entity_id_offset = region_offset + base_data["entity_id_offset"]
        msb_reader.seek(entity_id_offset)
        self.entity_id = msb_reader.unpack_value("i")

        return entity_id_offset
示例#18
0
    def unpack_event_dict(
        cls,
        reader: BinaryReader,
        instruction_table_offset,
        base_arg_data_offset,
        event_arg_table_offset,
        event_layers_table_offset,
        count=1,
    ) -> dict[int, Event]:
        """Unpack `count` events and return a dict mapping event ID to event instance.

        Duplicate event IDs are warned about and skipped (the first occurrence wins).
        """
        event_dict = {}
        struct_dicts = reader.unpack_structs(cls.HEADER_STRUCT, count=count)

        for d in struct_dicts:
            reader.seek(instruction_table_offset +
                        d["first_instruction_offset"])
            instruction_list = cls.Instruction.unpack(
                reader,
                base_arg_data_offset,
                event_layers_table_offset,
                count=d["instruction_count"])

            reader.seek(event_arg_table_offset + d["first_event_arg_offset"])
            event_args = cls.EventArg.unpack(reader,
                                             count=d["event_arg_count"])

            for arg_r in event_args:
                # Attach event arg replacements to their instruction line.
                instruction_list[arg_r.line].event_args.append(arg_r)

            # BUG FIX: `if event_id := d["event_id"] in event_dict:` bound the
            # *membership test result* (a bool) to `event_id` due to walrus operator
            # precedence, so the duplicate warning logged `True` instead of the ID.
            if (event_id := d["event_id"]) in event_dict:
                _LOGGER.warning(
                    f"Event ID {event_id} appears multiple times in EMEVD file. Only the first one will be kept."
                )
            else:
                event_dict[event_id] = cls(event_id,
                                           d["restart_type"],
                                           instruction_list)

        # BUG FIX: the dict was built but never returned (the function returned None),
        # unlike the sibling implementation, whose callers consume the returned dict.
        return event_dict
示例#19
0
    def unpack(self, msb_reader: BinaryReader):
        """Unpack an MSB entry list, validating its internal name, then seek to the next list."""
        header = msb_reader.unpack_struct(self.MAP_ENTITY_LIST_HEADER)
        # 'entry_offset_count' includes the tail offset, hence the -1.
        offsets = []
        for _ in range(header["entry_offset_count"] - 1):
            offsets.append(
                msb_reader.unpack_struct(self.MAP_ENTITY_ENTRY_OFFSET)["entry_offset"])
        next_entry_list_offset = msb_reader.unpack_struct(
            self.MAP_ENTITY_LIST_TAIL)["next_entry_list_offset"]
        name = msb_reader.unpack_string(offset=header["name_offset"],
                                        encoding=self.NAME_ENCODING)
        if name != self.INTERNAL_NAME:
            raise ValueError(
                f"MSB entry list internal name '{name}' does not match known name '{self.INTERNAL_NAME}'."
            )

        self._entries = []
        for offset in offsets:
            msb_reader.seek(offset)
            self._entries.append(self.ENTRY_CLASS(msb_reader))

        msb_reader.seek(next_entry_list_offset)
示例#20
0
 def _check_big_endian_and_struct_64(gnl_reader: BinaryReader):
     """Guess endianness and offset width from zero bytes in the first offset."""
     gnl_reader.seek(0)
     # The first two bytes of the first offset are zero only for big-endian data.
     is_big_endian = gnl_reader.unpack_value("h") == 0
     if is_big_endian:
         # Big-endian 64-bit: the entire first half of the offset is also zero.
         is_64_bit = gnl_reader.unpack_value("h") == 0
     else:
         # Little-endian 64-bit: the *second* half of the offset is zero.
         gnl_reader.seek(4)
         is_64_bit = gnl_reader.unpack_value("i") == 0
     gnl_reader.seek(0)
     return is_big_endian, is_64_bit
示例#21
0
def get_instruction_args(reader: BinaryReader, category, index,
                         first_arg_offset, event_args_size, format_dict):
    """Process instruction arguments (required and optional) from EMEVD binary.

    Returns `(args_format, args_list)`: a `struct`-style format string (keeping 's' for
    string-offset args, with '|' separating required from optional args) and the
    unpacked values. The reader's position is restored before returning.

    Raises:
        KeyError: if argument types for `(category, index)` are missing from `format_dict`.
        ValueError: if the optional argument size is not a multiple of four bytes.
    """

    previous_offset = reader.position
    if event_args_size == 0:
        return "", []
    try:
        args_format = "@" + format_dict[category][index]
    except KeyError:
        raise KeyError(
            f"Cannot find argument types for instruction {category}[{index:02d}]."
        )

    # 's' arguments are actually four-byte offsets into the packed string data, though we will keep the 's' symbol.
    struct_args_format = args_format.replace("s", "I")
    required_args_size = struct.calcsize(struct_args_format)
    if required_args_size > event_args_size:
        raise ValueError(
            f"Documented size of minimum required args for instruction {category}"
            f"[{index}] is {required_args_size}, but size of args specified in EMEVD file is "
            f"only {event_args_size}.")

    reader.seek(first_arg_offset)
    args = reader.unpack(struct_args_format)

    # Additional arguments may appear for the instruction 2000[00], 'RunEvent'. These instructions are tightly packed
    # and are always aligned to 4. We read them here as unsigned integers and must actually parse the called event ID to
    # interpret them properly (done at `EMEVD` class level).

    extra_size = event_args_size - required_args_size

    opt_arg_count = extra_size // 4
    if opt_arg_count == 0:
        # NOTE(review): 1-3 leftover bytes fall through here and are silently ignored
        # (treated as padding) — confirm this is intended.
        reader.seek(previous_offset)
        return args_format[1:], list(args)
    elif extra_size % 4 != 0:
        raise ValueError(
            f"Error interpreting instruction {category}[{index}]: optional argument "
            f"size is not a multiple of four bytes ({extra_size}).")

    opt_args = [reader.unpack_value("<I") for _ in range(opt_arg_count)]
    reader.seek(previous_offset)
    return args_format[1:] + "|" + "I" * (extra_size //
                                          4), list(args) + opt_args
示例#22
0
File: bxf.py  Project: LugeBox/soulstruct
 def unpack(self, reader: BinaryReader, bdt_source: GameFile.Typing = None):
     """Unpack a split binder: entry headers from the BHD reader, data from `bdt_source`."""
     headers = self.unpack_header(reader)
     data_reader = BinaryReader(bdt_source)
     data_reader.seek(0x30)  # skip useless BDT header
     for header in headers:
         self.add_entry(BinderEntry.from_header(data_reader, header))
示例#23
0
 def check_null_field(cls, msb_reader: BinaryReader, offset_to_null):
     """Warn if the unknown data block at `offset_to_null` is not all null bytes."""
     msb_reader.seek(offset_to_null)
     expected = b"\0" * cls.UNKNOWN_DATA_SIZE
     actual = msb_reader.read(cls.UNKNOWN_DATA_SIZE)
     if actual != expected:
         _LOGGER.warning(
             f"Null data entry in `{cls.__name__}` was not zero: {actual}.")
示例#24
0
 def _unpack_scene_gparam_data(self, msb_reader: BinaryReader, part_offset, header):
     """Read this part's SceneGParam struct and assign its fields onto the instance."""
     scene_gparam_offset = header["__scene_gparam_data_offset"]
     if scene_gparam_offset == 0:
         raise ValueError(f"Zero SceneGParam offset found in SceneGParam-supporting part {self.name}.")
     msb_reader.seek(part_offset + scene_gparam_offset)
     self.set(**msb_reader.unpack_struct(self.PART_SCENE_GPARAM_STRUCT))
示例#25
0
    def unpack(self, reader: BinaryReader, **kwargs):
        """Unpack a PARAM file: header, row pointers, then per-row data and names.

        Row size is inferred from consecutive row data offsets (or from the name data
        offset when there is a single row), since header sizes cannot be trusted.
        """
        # Byte 44 is 255 in big-endian params.
        self.byte_order = reader.byte_order = ">" if reader.unpack_value(
            "B", offset=44) == 255 else "<"
        version_info = reader.unpack("bbb", offset=45)
        self.flags1 = ParamFlags1(version_info[0])
        self.flags2 = ParamFlags2(version_info[1])
        self.paramdef_format_version = version_info[2]
        header_struct = self.GET_HEADER_STRUCT(self.flags1, self.byte_order)
        header = reader.unpack_struct(header_struct)
        try:
            self.param_type = header["param_type"]
        except KeyError:
            # Newer header formats store the param type string at a separate offset.
            self.param_type = reader.unpack_string(
                offset=header["param_type_offset"], encoding="utf-8")
        self.paramdef_data_version = header["paramdef_data_version"]
        self.unknown = header["unknown"]
        # Row data offset in header not used. (It's an unsigned short, yet doesn't limit row count to 5461.)
        name_data_offset = header[
            "name_data_offset"]  # CANNOT BE TRUSTED IN VANILLA FILES! Off by +12 bytes.

        # Load row pointer data.
        row_struct = self.ROW_STRUCT_64 if self.flags1.LongDataOffset else self.ROW_STRUCT_32
        row_pointers = reader.unpack_structs(row_struct,
                                             count=header["row_count"])
        row_data_offset = reader.position  # Reliable row data offset.

        # Row size is lazily determined. TODO: Unpack row data in sequence and associate with names separately.
        if len(row_pointers) == 0:
            return
        elif len(row_pointers) == 1:
            # NOTE: The only vanilla param in Dark Souls with one row is LEVELSYNC_PARAM_ST (Remastered only),
            # for which the row size is hard-coded here. Otherwise, we can trust the repacked offset from Soulstruct
            # (and SoulsFormats, etc.).
            if self.param_type == "LEVELSYNC_PARAM_ST":
                row_size = 220
            else:
                row_size = name_data_offset - row_data_offset
        else:
            row_size = row_pointers[1]["data_offset"] - row_pointers[0][
                "data_offset"]

        # Note that we no longer need to track reader offset.
        name_encoding = self.get_name_encoding()
        for row_struct in row_pointers:
            reader.seek(row_struct["data_offset"])
            row_data = reader.read(row_size)
            if row_struct["name_offset"] != 0:
                try:
                    name = reader.unpack_string(
                        offset=row_struct["name_offset"],
                        encoding=name_encoding,
                        reset_old_offset=False,  # no need to reset
                    )
                except UnicodeDecodeError as ex:
                    # Known-undecodable names are kept as raw bytes rather than
                    # failing the whole load.
                    if ex.object in self.undecodable_row_names:
                        name = reader.unpack_bytes(
                            offset=row_struct["name_offset"],
                            reset_old_offset=False,  # no need to reset
                        )
                    else:
                        raise
                except ValueError:
                    # Dump context (header, row pointer, raw name bytes) before re-raising.
                    reader.seek(row_struct["name_offset"])
                    _LOGGER.error(
                        f"Error encountered while parsing row name string in {self.param_type}.\n"
                        f"    Header: {header}\n"
                        f"    Row Struct: {row_struct}\n"
                        f"    30 chrs of name data: {' '.join(f'{{:02x}}'.format(x) for x in reader.read(30))}"
                    )
                    raise
            else:
                name = ""
            self.rows[row_struct["id"]] = ParamRow(row_data,
                                                   self.paramdef,
                                                   name=name)
                                                   name=name)