Example #1
0
    def unpack(self, row_reader: BinaryReader):
        """Unpack a single param row from `row_reader` into `self.fields`, keyed by field name.

        Iterates over the paramdef's field definitions in order; bit-packed fields go
        through `self.bit_reader`, everything else is read directly from `row_reader`.
        """

        for field in self.paramdef.fields.values():

            if field.bit_count != -1:
                # Bit-packed field: read `bit_count` bits via the shared bit reader.
                field_value = self.bit_reader.read(row_reader, field.bit_count,
                                                   field.fmt)
            else:
                # Non-bit field: discard any partially-consumed byte in the bit reader.
                self.bit_reader.clear()
                if issubclass(field.type_class, ft.basestring):
                    field_value = field.type_class.read(row_reader, field.size)
                elif field.type_class is ft.dummy8:
                    # These are often 'array' fields, but we don't even bother unpacking them.
                    field_value = row_reader.read(field.size)
                else:
                    data = row_reader.read(field.type_class.size())
                    try:
                        field_value = struct.unpack(field.fmt, data)[0]
                    except struct.error as e:
                        if field.display_name in {
                                "inverseToneMapMul", "sfxMultiplier"
                        }:
                            # These fields are malformed in m99 and default ToneMapBank in Dark Souls Remastered.
                            field_value = 1.0
                        else:
                            raise ValueError(
                                f"Could not unpack data for field {field.name} in ParamRow {self.name}.\n"
                                f"Field type: {field.display_type}\n"
                                f"Raw bytes: {data}\n"
                                f"Error:\n{str(e)}")
            # Single-bit fields are exposed to callers as booleans.
            self.fields[field.name] = bool(
                field_value) if field.bit_count == 1 else field_value
Example #2
0
    def unpack(self, reader: BinaryReader, start_offset: int, depth=0):
        """Unpack one FBX node record: header struct, name, properties, then child nodes.

        `start_offset` is the absolute offset at which this node record began; `depth`
        tracks nesting level. `self.size` accumulates the node's total byte size as
        parsing proceeds and drives the child-reading loop below.
        """
        data = reader.unpack_struct(self.STRUCT)
        name_length = data.pop("__name_length")
        self.name = reader.unpack_string(length=name_length, encoding="ascii")

        self.size = self.STRUCT.size + name_length
        self.depth = depth

        # TODO: Use `_properties` and `field` properties, which inspect the node name, etc.
        self._properties = [FBXProperty.unpack(reader) for _ in range(data.pop("__property_count"))]

        self.size += data.pop("__property_list_size")

        # Child records occupy the remaining span up to the header's `__end_offset`.
        self.children = []
        end_offset = data.pop("__end_offset")
        while start_offset + self.size < end_offset:
            child = self.__class__(reader, start_offset=start_offset + self.size, depth=self.depth + 1)
            self.size += child.size
            if start_offset + self.size == end_offset:
                break  # empty node is not kept
            self.children.append(child)

        # 'P' nodes are leaf property fields: properties hold (field name, *field args).
        if self.name == "P":
            if self.children:
                raise ValueError("`FBXNode` named 'P' should not have any children.")
            name, *args = [p.value for p in self._properties]
            self._field = FBXPropertyField(name, *args)
        else:
            self._field = None
Example #3
0
 def unpack_from(cls, reader: BinaryReader, data_offset: int):
     """Read one FSB sample: its fixed header, trailing metadata, and audio data at `data_offset`."""
     sample_header = FSBSampleHeader(reader)
     # Metadata is whatever remains of the declared header size beyond the fixed struct.
     remaining_metadata_size = sample_header.total_size - FSBSampleHeader.STRUCT.size
     sample_metadata = reader.read(remaining_metadata_size)
     # Jump to the sample's audio data without disturbing the header read position.
     with reader.temp_offset(data_offset):
         sample_data = reader.read(sample_header.compressed_length)
     return FSBSample(sample_header, sample_metadata, sample_data)
Example #4
0
    def decrypt_regulation_bin(self, data: bytes) -> bytes:
        """Decrypt Elden Ring `regulation.bin` data with AES-256-CBC using `self.REGULATION_KEY`.

        The first 16 bytes of `data` are the IV. Returns the decrypted bytes (the leading
        IV block is included in the result; callers strip it before DCX decompression).

        Raises:
            ModuleNotFoundError: if the optional `aespython` package is not installed.
        """
        try:
            # noinspection PyPackageRequirements
            from aespython import key_expander, aes_cipher, cbc_mode
        except ImportError:
            raise ModuleNotFoundError(
                "Cannot decrypt `regulation.bin` for Elden Ring without `aespython` package."
            )

        iv = data[:16]
        encrypted = BinaryReader(data)

        key_expander_256 = key_expander.KeyExpander(256)
        expanded_key = key_expander_256.expand(bytearray(self.REGULATION_KEY))
        aes_cipher_256 = aes_cipher.AESCipher(expanded_key)
        aes_cbc_256 = cbc_mode.CBCMode(aes_cipher_256, 16)
        aes_cbc_256.set_iv(iv)

        # Decrypt 16-byte blocks; collect chunks and join once at the end
        # (fix: the original used quadratic `bytes +=` accumulation).
        decrypted_chunks = []
        while True:
            chunk = encrypted.read(16)
            if len(chunk) == 0:  # end of file
                break
            decrypted_chunks.append(bytes(
                aes_cbc_256.decrypt_block(list(bytearray(chunk)))))

        # Fix: removed leftover debug writes of "regulation.bin.decrypted*" files to the
        # current working directory; decryption should have no filesystem side effects.
        return b"".join(decrypted_chunks)
Example #5
0
    def unpack(self, reader: BinaryReader, remove_empty_entries=True):
        """Unpack FMG text entries into `self.entries`, keyed by integer string ID.

        If `remove_empty_entries` is True, zero-offset (null) and empty strings are
        skipped; otherwise zero-offset entries are stored as "" (these trigger in-game
        error text like `?PlaceName?`).
        """
        header = reader.unpack_struct(self.HEADER_STRUCT)

        # Groups of contiguous text string IDs are defined by ranges (first ID, last ID) to save space.
        ranges = reader.unpack_structs(self.RANGE_STRUCT,
                                       count=header["range_count"])
        if reader.position != header["string_offsets_offset"]:
            _LOGGER.warning(
                "Range data did not end at string data offset given in FMG header."
            )
        string_offsets = reader.unpack_structs(self.STRING_OFFSET_STRUCT,
                                               count=header["string_count"])

        # Text pointer table corresponds to all the IDs (joined together) of the above ranges, in order.
        for string_range in ranges:
            # `i` indexes into the flat string offset table; advances once per ID.
            i = string_range["first_index"]
            for string_id in range(string_range["first_id"],
                                   string_range["last_id"] + 1):
                if string_id in self.entries:
                    raise ValueError(
                        f"Malformed FMG: Entry index {string_id} appeared more than once."
                    )
                string_offset = string_offsets[i]["offset"]
                if string_offset == 0:
                    if not remove_empty_entries:
                        # Empty text string. These will trigger in-game error messages, like ?PlaceName?.
                        # Distinct from ' ', which is intentionally blank text data (e.g. the unused area subtitles).
                        self.entries[string_id] = ""
                else:
                    string = reader.unpack_string(offset=string_offset,
                                                  encoding="utf-16le")
                    if string or not remove_empty_entries:
                        self.entries[string_id] = string
                i += 1
Example #6
0
    def unpack(self, reader: BinaryReader, **kwargs):
        """Unpack an FSB header and all of its samples from `reader`.

        Raises:
            ValueError: if the accumulated sample data does not end exactly at the
                file size implied by the header.
        """
        data = reader.unpack_struct(self.STRUCT)
        self.version = FSBHeaderVersion(data["version"])
        self.mode_flags = data["mode_flags"]
        # Bank hash is stored big-endian regardless of the rest of the struct.
        self.bank_hash = struct.unpack(">Q", data["bank_hash"])[0]
        # GUID text form: first three chunks are little-endian on disk, so reverse them.
        self.guid = "-".join((
            data["_guid"][3::-1].hex(),  # first three chunks need to be reversed
            data["_guid"][5:3:-1].hex(),
            data["_guid"][7:5:-1].hex(),
            data["_guid"][8:10].hex(),
            data["_guid"][10:].hex(),
        ))

        # Sample data begins immediately after all sample headers.
        data_offset = reader.position + data["sample_headers_size"]
        file_size = data_offset + data["sample_data_size"]

        self.samples = []
        for i in range(data["sample_count"]):
            if self.mode_flags & FSBHeaderMode.BASICHEADERS and i > 0:
                # Clone of first sample, with new length/compressed length information.
                sample = copy.deepcopy(self.samples[0])
                basic_sample_header = reader.unpack_struct(self.BASIC_SAMPLE_STRUCT)
                sample.header.length = basic_sample_header["length"]
                sample.header.compressed_length = basic_sample_header["length"]
            else:
                # New sample.
                sample = FSBSample.unpack_from(reader, data_offset=data_offset)
                data_offset += sample.header.compressed_length
            self.samples.append(sample)
        if data_offset != file_size:
            raise ValueError(f"Sample data end offset ({data_offset}) does not equal expected file size ({file_size}).")
Example #7
0
    def unpack(self, reader: BinaryReader, **kwargs):
        """Unpack a BND4 binder: header, entry headers, entry data, and optional hash table.

        Also records the most recent entry count/paths/hash table so repacking can
        detect whether the hash table needs regenerating.
        """
        entry_count, entry_header_size, hash_table_offset, data_offset = self.unpack_header(
            reader)

        # Sanity-check the header's entry header size against what the flags imply.
        flags_header_size = self.flags.get_bnd_entry_header_size()
        if entry_header_size != flags_header_size:
            raise ValueError(
                f"Expected BND entry header size {flags_header_size} based on flags\n"
                f"{self.flags:08b}, but BND header says {entry_header_size}.")
        if self.hash_table_type != 4 and hash_table_offset != 0:
            _LOGGER.warning(
                f"Found non-zero hash table offset {hash_table_offset}, but header says this BND has no hash "
                f"table.")

        entry_headers = [
            BinderEntryHeader.from_bnd4_reader(reader, self.flags,
                                               self.bit_big_endian,
                                               self.unicode)
            for _ in range(entry_count)
        ]
        for entry_header in entry_headers:
            self.add_entry(BinderEntry.from_header(reader, entry_header))

        if self.hash_table_type == 4:
            # Save the initial hash table.
            reader.seek(hash_table_offset)
            self._most_recent_hash_table = reader.read(data_offset -
                                                       hash_table_offset)
        self._most_recent_entry_count = len(self._entries)
        self._most_recent_paths = [entry.path for entry in self._entries]
Example #8
0
    def unpack_event_dict(
        cls,
        reader: BinaryReader,
        instruction_table_offset,
        base_arg_data_offset,
        event_arg_table_offset,
        event_layers_table_offset,
        count=1,
    ) -> dict[int, Event]:
        """Unpack `count` events from the EMEVD event table into a dict keyed by event ID.

        Each event's instructions and event-arg replacements are read from the offsets
        recorded in its header struct; event args are attached to their instructions.
        """
        event_dict = {}
        struct_dicts = reader.unpack_structs(cls.HEADER_STRUCT, count=count)

        for d in struct_dicts:
            reader.seek(instruction_table_offset +
                        d["first_instruction_offset"])
            instruction_list = cls.Instruction.unpack(
                reader,
                base_arg_data_offset,
                event_layers_table_offset,
                count=d["instruction_count"])

            reader.seek(event_arg_table_offset + d["first_event_arg_offset"])
            event_args = cls.EventArg.unpack(reader,
                                             count=d["event_arg_count"])

            for arg_r in event_args:
                # Attach event arg replacements to their instruction line.
                instruction_list[arg_r.line].event_args.append(arg_r)

            event_dict[d["event_id"]] = cls(d["event_id"], d["restart_type"],
                                            instruction_list)

        return event_dict
Example #9
0
def decompress(dcx_source: ReadableTyping) -> tuple[bytes, DCXType]:
    """Decompress the given file path, raw bytes, or buffer/reader.

    Returns a tuple containing the decompressed `bytes` and the detected `DCXType`, which
    can be used to compress later with the same DCX type/parameters.

    Raises:
        ValueError: if the DCX type cannot be detected, or the decompressed size does
            not match the size recorded in the DCX header.
    """
    reader = BinaryReader(dcx_source, byte_order=">")  # always big-endian
    dcx_type = DCXType.detect(reader)

    if dcx_type == DCXType.Unknown:
        raise ValueError("Unknown DCX type. Cannot decompress.")

    header = reader.unpack_struct(DCX_HEADER_STRUCTS[dcx_type], byte_order=">")
    compressed = reader.read(
        header["compressed_size"])  # TODO: do I need to rstrip nulls?

    # Oodle Kraken needs the external `oodle` decompressor; everything else is zlib/DEFLATE.
    if dcx_type == DCXType.DCX_KRAK:
        decompressed = oodle.decompress(compressed,
                                        header["decompressed_size"])
    else:
        decompressed = zlib.decompressobj().decompress(compressed)

    if len(decompressed) != header["decompressed_size"]:
        raise ValueError(
            "Decompressed DCX data size does not match size in header.")
    return decompressed, dcx_type
Example #10
0
    def unpack(self, msb_reader: BinaryReader, **kwargs):
        """Unpack an MSB from the given reader."""

        # Read (and ignore) constant header, if applicable.
        if self.HEADER:
            msb_reader.seek(msb_reader.position + len(self.HEADER))

        # The four entry lists are packed back-to-back in this fixed order.
        self.models = self.MODEL_LIST_CLASS(msb_reader)
        self.events = self.EVENT_LIST_CLASS(msb_reader)
        self.regions = self.REGION_LIST_CLASS(msb_reader)
        self.parts = self.PART_LIST_CLASS(msb_reader)

        # Resolve the name lists needed to convert packed indices into name references.
        model_names = self.models.set_and_get_unique_names()
        environment_names = self.events.get_entry_names(
            MSBEventSubtype.Environment)
        region_names = self.regions.set_and_get_unique_names()
        part_names = self.parts.set_and_get_unique_names()
        collision_names = self.parts.get_entry_names(MSBPartSubtype.Collision)

        self.events.set_names(region_names=region_names, part_names=part_names)
        self.parts.set_names(
            model_names=model_names,
            environment_names=environment_names,
            region_names=region_names,
            part_names=part_names,
            collision_names=collision_names,
        )
Example #11
0
 def unpack_strings(self) -> list[tuple[str, str]]:
     """Split `self.packed_strings` into `(offset_as_string, string)` pairs, in order."""
     string_reader = BinaryReader(self.packed_strings)
     total_size = len(self.packed_strings)
     pairs = []
     # Each null-terminated string is keyed by its starting offset (as a string).
     while string_reader.position != total_size:
         start = string_reader.position
         text = string_reader.unpack_string(encoding=self.STRING_ENCODING)
         pairs.append((str(start), text))
     return pairs
Example #12
0
 def unpack_from(cls, reader: BinaryReader):
     """Read a `FloatStruct` from `reader` and return it.

     Layout: `unk0` int, then a byte length (non-negative multiple of 4), then
     `length // 4` floats stored into `values`.

     Raises:
         ValueError: if `length` is negative or not a multiple of 4.
     """
     float_struct = cls()
     float_struct.unk0 = reader.unpack_value("i")
     length = reader.unpack_value("i")
     if length < 0 or length % 4:
         raise ValueError(
             f"Unexpected `FloatStruct` length: {length}. Expected a multiple of 4 (or 0)."
         )
     float_struct.values = list(reader.unpack(f"{length // 4}f"))
     # Fix: the original never returned the constructed instance (callers got `None`).
     return float_struct
Example #13
0
 def _check_big_endian(reader: BinaryReader):
     """Return True if the LuaInfo byte-order marker at offset 4 indicates big-endian."""
     with reader.temp_offset(4):
         endian = reader.unpack_value("i")
     # 0x1 read big-endian from a little-endian file appears as 0x1000000.
     if endian == 0x1:
         return False
     if endian == 0x1000000:
         return True
     raise ValueError(
         f"Invalid marker for LuaInfo byte order: {hex(endian)}")
Example #14
0
    def unpack(self, reader: BinaryReader):
        """Unpack this buffer layout's struct and its list of layout members."""
        layout = reader.unpack_struct(self.STRUCT)

        member_offset = layout.pop("__member_offset")
        member_count = layout.pop("__member_count")
        self.members = []
        # Members are read at their table offset; each member's struct offset is the
        # running total of the sizes of the members before it.
        with reader.temp_offset(member_offset):
            running_offset = 0
            for _ in range(member_count):
                new_member = LayoutMember(reader, struct_offset=running_offset)
                self.members.append(new_member)
                running_offset += new_member.layout_type.size()
Example #15
0
    def unpack(self, emevd_reader: BinaryReader, **kwargs):
        """Unpack an EMEVD file: events, packed string data, and linked file offsets.

        After reading, event args are parsed for `RunEvent`/`RunCommonEvent` instructions.
        """
        header = emevd_reader.unpack_struct(self.HEADER_STRUCT)

        emevd_reader.seek(header["event_table_offset"])
        event_dict = self.Event.unpack_event_dict(
            emevd_reader,
            header["instruction_table_offset"],
            header["base_arg_data_offset"],
            header["event_arg_table_offset"],
            header["event_layers_table_offset"],
            count=header["event_count"],
        )

        self.events.update(event_dict)

        if header["packed_strings_size"] != 0:
            emevd_reader.seek(header["packed_strings_offset"])
            self.packed_strings = emevd_reader.read(
                header["packed_strings_size"])

        if header["linked_files_count"] != 0:
            emevd_reader.seek(header["linked_files_table_offset"])
            # These are relative offsets into the packed string data.
            for _ in range(header["linked_files_count"]):
                self.linked_file_offsets.append(
                    struct.unpack("<Q", emevd_reader.read(8))[0])

        # Parse event args for `RunEvent` and `RunCommonEvent` instructions.
        # Two passes: all function args must be known before run instructions are updated.
        for event in self.events.values():
            event.update_evs_function_args()
        for event in self.events.values():
            event.update_run_event_instructions()
Example #16
0
    def unpack(self, msb_reader: BinaryReader):
        """Unpack one MSB part entry: header, transform, group bit sets, strings, and type data.

        Raises:
            ValueError: if the packed part type does not match this class's `ENTRY_SUBTYPE`.
        """
        part_offset = msb_reader.position

        header = msb_reader.unpack_struct(self.PART_HEADER_STRUCT)
        if header["__part_type"] != self.ENTRY_SUBTYPE:
            # Fix: error message previously read non-existent key 'part_type', which raised
            # an unrelated `KeyError` instead of this `ValueError`.
            raise ValueError(f"Unexpected part type enum {header['__part_type']} for class {self.__class__.__name__}.")
        self._instance_index = header["_instance_index"]
        self._model_index = header["_model_index"]
        self._part_type_index = header["_part_type_index"]
        for transform in ("translate", "rotate", "scale"):
            setattr(self, transform, Vector3(header[transform]))
        self._draw_groups = int_group_to_bit_set(header["__draw_groups"], assert_size=8)
        self._display_groups = int_group_to_bit_set(header["__display_groups"], assert_size=8)
        self._backread_groups = int_group_to_bit_set(header["__backread_groups"], assert_size=8)
        # Strings are stored at offsets relative to the start of this part entry.
        self.description = msb_reader.unpack_string(
            offset=part_offset + header["__description_offset"], encoding="utf-16-le",
        )
        self.name = msb_reader.unpack_string(
            offset=part_offset + header["__name_offset"], encoding="utf-16-le",
        )
        self.sib_path = msb_reader.unpack_string(
            offset=part_offset + header["__sib_path_offset"], encoding="utf-16-le",
        )

        msb_reader.seek(part_offset + header["__base_data_offset"])
        base_data = msb_reader.unpack_struct(self.PART_BASE_DATA_STRUCT)
        self.set(**base_data)

        msb_reader.seek(part_offset + header["__type_data_offset"])
        self.unpack_type_data(msb_reader)

        # GParam / SceneGParam data are subtype-specific.
        self._unpack_gparam_data(msb_reader, part_offset, header)
        self._unpack_scene_gparam_data(msb_reader, part_offset, header)
Example #17
0
 def unpack(self, reader: BinaryReader, **kwargs):
     """Read the zero-terminated offset table and resolve each offset into `self.names`."""
     self.big_endian, self.use_struct_64 = self._check_big_endian_and_struct_64(
         reader)
     byte_order = ">" if self.big_endian else "<"
     value_char = "q" if self.use_struct_64 else "i"
     fmt = byte_order + value_char
     read_size = struct.calcsize(fmt)
     self.names = []
     while True:
         (offset,) = struct.unpack(fmt, reader.read(read_size))
         if offset == 0:
             break  # a zero offset terminates the table
         self.names.append(
             reader.unpack_string(offset=offset, encoding=self.encoding))
Example #18
0
    def unpack(cls, reader: BinaryReader, event_layers_offset):
        """Unpack event layer bit field as <a, b, c, ...> where a, b, c, ... are the little-endian bit
        zero-based indices of the event layer bit field.

        e.g. field 01001...110 would be {1, 4, 29, 30}.
        """
        reader.seek(event_layers_offset)
        header = reader.unpack_struct(cls.HEADER_STRUCT)
        bit_field = header["event_layers"]
        # Collect the indices of all set bits in the 32-bit field.
        enabled_layers = [bit for bit in range(32) if bit_field & (1 << bit)]
        return cls(enabled_layers)
Example #19
0
    def unpack(self,
               reader: BinaryReader,
               bounding_box_has_unknown: bool = None):
        """Unpack one FLVER mesh: bounding box, bone indices, face set and vertex buffer indices.

        `bounding_box_has_unknown` selects the extended bounding box struct when True.
        Remaining struct fields (after the `__`-prefixed ones are popped) are applied
        via `self.set(**mesh)` at the end.
        """
        mesh = reader.unpack_struct(self.STRUCT)

        bounding_box_offset = mesh.pop("__bounding_box_offset")
        if bounding_box_offset == 0:
            # Zero offset means no bounding box is stored for this mesh.
            self.bounding_box = None
        else:
            with reader.temp_offset(bounding_box_offset):
                self.bounding_box = BoundingBoxWithUnknown(
                    reader) if bounding_box_has_unknown else BoundingBox(
                        reader)

        bone_count = mesh.pop("__bone_count")
        with reader.temp_offset(mesh.pop("__bone_offset")):
            self.bone_indices = list(reader.unpack(f"<{bone_count}i"))

        face_set_count = mesh.pop("__face_set_count")
        with reader.temp_offset(mesh.pop("__face_set_offset")):
            self._face_set_indices = list(reader.unpack(f"<{face_set_count}i"))

        vertex_count = mesh.pop("__vertex_buffer_count")
        with reader.temp_offset(mesh.pop("__vertex_buffer_offset")):
            self._vertex_buffer_indices = list(
                reader.unpack(f"<{vertex_count}i"))

        self.set(**mesh)
Example #20
0
    def unpack_value(self, reader: BinaryReader) -> PropertyValueTyping:
        """Read one FBX property value of this type (`self` is the `PropertyType` member).

        Bytes/String types are length-prefixed raw data; array types may be
        zlib-compressed; scalar types are read directly with this type's format.
        """
        if self in {self.Bytes, self.String}:
            raw_data_size = reader.unpack_value("i")
            value = reader.read(raw_data_size)
            if self == PropertyType.String:
                # Assumed encoding (could also be "ascii").
                return value.decode("utf-8")  # str
            return value  # bytes

        if "Array" not in self.name:
            # Scalar: a single value in this type's struct format.
            return reader.unpack_value(self.get_fmt())

        # Array: length, compression flag, and compressed byte size precede the data.
        array_length, is_compressed, compressed_size = reader.unpack("III")
        if is_compressed:
            decompressed_size = self.get_size(array=True) * array_length
            decompressed = zlib.decompressobj().decompress(reader.read(compressed_size))
            if len(decompressed) != decompressed_size:
                raise ValueError(
                    f"FBX property decompressed data size ({len(decompressed)}) does not match expected size "
                    f"({decompressed_size})"
                )
            array_reader = BinaryReader(decompressed)
        else:
            array_reader = reader
        fmt = self.get_fmt(array_length)
        return list(array_reader.unpack(fmt))
Example #21
0
 def unpack(self, msb_reader: BinaryReader):
     """Unpack an MSB model entry: name, SIB path, and subtype, then apply remaining fields.

     Raises:
         ValueError: if the packed model type is not a valid `MSBModelSubtype`.
     """
     model_offset = msb_reader.position
     model_data = msb_reader.unpack_struct(self.MODEL_STRUCT)
     self.name = msb_reader.unpack_string(
         offset=model_offset + model_data["__name_offset"], encoding=self.NAME_ENCODING
     )
     self.sib_path = msb_reader.unpack_string(
         offset=model_offset + model_data["__sib_path_offset"], encoding=self.NAME_ENCODING,
     )
     try:
         self.ENTRY_SUBTYPE = MSBModelSubtype(model_data["__model_type"])
     except ValueError:
         # Fix: `Enum` lookup raises `ValueError` (not `TypeError`) for an unrecognized
         # value, so the original handler never fired.
         raise ValueError(f"Unrecognized MSB model type: {model_data['__model_type']}")
     self.set(**model_data)
Example #22
0
 def _check_use_struct_64(reader: BinaryReader, goal_count):
     """Return True if this `LuaInfo` uses the 64-bit goal struct, probing known offsets."""
     if goal_count == 0:
         raise LuaError(
             "Cannot detect `LuaInfo` version if no goals are present.")
     if goal_count >= 2:
         # With two or more goals, a zero at 0x24 identifies the 64-bit layout.
         return reader.unpack_value("i", offset=0x24) == 0
     # Hacky check if there's only one goal.
     if reader.unpack_value("i", offset=0x18) == 0x28:
         return True
     if reader.unpack_value("i", offset=0x14) == 0x20:
         return False
     raise ValueError(
         "Found unexpected data while trying to detect `LuaInfo` version from single goal."
     )
Example #23
0
 def unpack(self, msb_reader: BinaryReader):
     """Unpack an MSB model entry and validate its subtype value against this class."""
     entry_start = msb_reader.position
     header = msb_reader.unpack_struct(self.MODEL_STRUCT)
     # Name and SIB path are stored at offsets relative to the entry start.
     self.name = msb_reader.unpack_string(
         offset=entry_start + header["__name_offset"],
         encoding=self.NAME_ENCODING,
     )
     self.sib_path = msb_reader.unpack_string(
         offset=entry_start + header["__sib_path_offset"],
         encoding=self.NAME_ENCODING,
     )
     model_type = header["__model_type"]
     if model_type != self.ENTRY_SUBTYPE.value:
         raise ValueError(
             f"Unexpected MSB model type value {model_type} for {self.__class__.__name__}. "
             f"Expected {self.ENTRY_SUBTYPE.value}.")
     self.set(**header)
Example #24
0
def get_instruction_args(reader: BinaryReader, category, index,
                         first_arg_offset, event_args_size, emedf: dict):
    """Process instruction arguments (required and optional) from EMEVD binary."""

    try:
        emedf_args_info = emedf[category, index]["args"]
    except KeyError:
        raise KeyError(
            f"Could not find instruction ({category}, {index}) in `Instruction.EMEDF`."
        )
    # Remember where we were; the reader position is restored before returning.
    previous_offset = reader.position
    if event_args_size == 0:
        return "", []
    try:
        args_format = "@" + "".join(arg["internal_type"].get_fmt()
                                    for arg in emedf_args_info.values())
    except KeyError:
        raise KeyError(
            f"Cannot find argument types for instruction {category}[{index:02d}] ({event_args_size} bytes)"
        )

    # 's' arguments are actually four-byte offsets into the packed string data, though we will keep the 's' symbol.
    struct_args_format = args_format.replace("s", "I")
    required_args_size = struct.calcsize(struct_args_format)
    if required_args_size > event_args_size:
        raise ValueError(
            f"Documented size of minimum required args for instruction {category}"
            f"[{index}] is {required_args_size}, but size of args specified in EMEVD file is "
            f"only {event_args_size}.")

    reader.seek(first_arg_offset)
    args = reader.unpack(struct_args_format)

    # Additional arguments may appear for the instruction 2000[00], 'RunEvent'. These instructions are tightly packed
    # and are always aligned to 4. We read them here as unsigned integers and must actually parse the called event ID to
    # interpret them properly (done at `EMEVD` class level).

    extra_size = event_args_size - required_args_size

    opt_arg_count = extra_size // 4
    if opt_arg_count == 0:
        # NOTE(review): an `extra_size` of 1-3 bytes also lands here and is silently
        # ignored (the misalignment check below is never reached) — confirm intended.
        reader.seek(previous_offset)
        return args_format[1:], list(args)
    elif (category, index) not in _OPTIONAL_ARGS_ALLOWED:
        raise ValueError(
            f"Extra arguments found for instruction {category}[{index}], which is not permitted. Arg types may be "
            f"wrong (too short) for this instruction.\n"
            f"    required size = {required_args_size}\n"
            f"    actual size = {event_args_size}")
    elif extra_size % 4 != 0:
        raise ValueError(
            f"Error interpreting instruction {category}[{index}]: optional argument "
            f"size is not a multiple of four bytes ({extra_size}).")

    opt_args = [reader.unpack_value("<I") for _ in range(opt_arg_count)]
    reader.seek(previous_offset)
    # Format string marks optional args after '|'; they are all unsigned ints.
    return args_format[1:] + "|" + "I" * (extra_size //
                                          4), list(args) + opt_args
Example #25
0
    def __init__(self, msb_entry_list_source=None):
        """Build an entry list from `None`, a sequence/dict of `MSBEntry`, or binary data."""
        self._entries = []

        if msb_entry_list_source is None:
            return

        if isinstance(msb_entry_list_source, (list, tuple, dict)):
            if isinstance(msb_entry_list_source, dict):
                # Dict values are taken in sorted-key order.
                msb_entry_list_source = [
                    msb_entry_list_source[key]
                    for key in sorted(msb_entry_list_source)
                ]
            for entry in msb_entry_list_source:
                if not isinstance(entry, MSBEntry):
                    raise TypeError(
                        "Non-MSBEntry found in source sequence for MSB.")
                self._entries.append(entry)
            return

        # Binary sources are normalized to a `BinaryReader` and unpacked.
        if isinstance(msb_entry_list_source, (bytes, io.BufferedIOBase)):
            msb_entry_list_source = BinaryReader(msb_entry_list_source)
        if isinstance(msb_entry_list_source, BinaryReader):
            self.unpack(msb_entry_list_source)
        else:
            raise TypeError(
                f"Invalid MSB entry list source: {msb_entry_list_source}")
Example #26
0
    def __init__(self, dcx_source=None, magic=()):
        """Open a ".dcx" file, which is a compressed version of any FromSoftware file type (e.g. a BND).

        Use `.data` to get the `bytes` of the uncompressed file within. The `.magic` attribute specifies information
        about the DCX header.

        Note that the `GameFile` base class handles DCX automatically.
        """

        self.dcx_path = None
        self.data = b""
        self._magic = ()
        # NOTE(review): assigning `self.magic` presumably goes through a validating
        # property setter backed by `_magic` — confirm against the class definition.
        self.magic = magic

        if dcx_source is None:
            return
        elif isinstance(dcx_source, (str, Path)):
            # Record the path; the source still falls through to the unpack below.
            self.dcx_path = Path(dcx_source)
        elif isinstance(dcx_source, bytes):
            # Raw bytes carry no header info, so `magic` must be supplied by the caller.
            if not self.magic:
                raise ValueError(
                    f"If `dcx_source` is a `bytes` object, DCX `magic` must be given."
                )
            self.data = dcx_source
            return

        if isinstance(dcx_source,
                      (str, Path, io.BufferedIOBase, BinaryReader)):
            self.unpack(BinaryReader(dcx_source))
        else:
            raise TypeError(f"Invalid DCX source type: {type(dcx_source)}")
Example #27
0
def FMG(fmg_source, dcx_type=None, remove_empty_entries=True) -> BaseFMG:
    """Auto-detect the FMG version of `fmg_source` and construct the matching FMG class."""
    if fmg_source is None:
        raise ValueError(f"Cannot auto-detect FMG class from source `None`.")
    if isinstance(fmg_source, dict):
        try:
            version = fmg_source["version"]
        except KeyError:
            raise ValueError(f"No `version` key in FMG dictionary to read.")
    elif isinstance(fmg_source, GameFile.Types):
        # Version byte sits at relative offset 6 in the FMG header.
        version = BinaryReader(fmg_source).unpack_value("b",
                                                        offset=6,
                                                        relative_offset=True)
    else:
        raise ValueError(
            f"Cannot auto-detect FMG class from source type {type(fmg_source)}."
        )

    # Dispatch table replaces the if/elif version chain.
    version_classes = {0: FMG0, 1: FMG1, 2: FMG2}
    if version not in version_classes:
        raise ValueError(f"Unrecognized FMG version: {version}")
    return version_classes[version](fmg_source,
                                    dcx_type=dcx_type,
                                    remove_empty_entries=remove_empty_entries)
Example #28
0
 def read(cls, reader: BinaryReader, bit_big_endian: bool) -> BinderFlags:
     """Read a byte, reverse it if necessary, and return flags integer."""
     flags = cls(reader.unpack_value("B"))
     # NOTE(review): bit order is reversed unless the binder is bit-big-endian, with an
     # exemption for the `is_big_endian and not has_flag_7` combination — confirm this
     # matches the BND format spec for the games involved.
     if not bit_big_endian and not (flags.is_big_endian
                                    and not flags.has_flag_7):
         # Reverse the 8-bit pattern via its binary string representation.
         flags = cls(int(f"{flags:08b}"[::-1], 2))
     return flags
Example #29
0
    def unpack(cls, esd_reader: BinaryReader, state_machine_offset,
               count) -> dict[int, State]:
        """Unpack multiple states from the same state table.

        Returns a dictionary of states, because it's always possible (if yet unseen) that state indices are not
        contiguous. State 0 is not repeated, as it generally is in the packed table.
        """

        esd_reader.seek(state_machine_offset)
        headers = esd_reader.unpack_structs(cls.STRUCT, count=count)

        states = {}
        for header in headers:
            conditions = cls.Condition.unpack(
                esd_reader,
                header["condition_pointers_offset"],
                count=header["condition_pointers_count"],
            )

            # The three command lists share a naming pattern; unpack them uniformly.
            command_lists = [
                cls.Command.unpack(
                    esd_reader,
                    header[f"{kind}_commands_offset"],
                    count=header[f"{kind}_commands_count"],
                )
                for kind in ("enter", "exit", "ongoing")
            ]

            # State 0 will be overwritten when repeated at the end of the table, rather than added.
            states[header["index"]] = cls(
                header["index"],
                conditions,
                *command_lists,
            )

        return states
Example #30
0
    def detect(cls, reader: BinaryReader) -> DCXType:
        """Detect type of DCX. Resets offset when done."""
        old_offset = reader.tell()

        dcx_type = cls.Unknown

        magic = reader.unpack_value("4s")
        if magic == b"DCP\0":  # rare, only for older games and DeS test maps
            # Possible file pattern for DFLT or EDGE compression.
            dcx_fmt = reader.unpack_value("4s", offset=4)
            if dcx_fmt == b"DCP\0":
                dcx_type = cls.DCP_DFLT
            elif dcx_fmt == b"EDGE":
                dcx_type = cls.DCP_EDGE
        elif magic == b"DCX\0":
            # Compression format tag lives at fixed offset 0x28 in the DCX header.
            dcx_fmt = reader.unpack_value("4s", offset=0x28)
            if dcx_fmt == b"EDGE":
                dcx_type = cls.DCX_EDGE
            elif dcx_fmt == b"DFLT":
                # Check four unknown header fields to determine DFLT subtype.
                unk04 = reader.unpack_value("i", offset=0x4)
                unk10 = reader.unpack_value("i", offset=0x10)
                unk30 = reader.unpack_value("i", offset=0x30)
                unk38 = reader.unpack_value("B", offset=0x38)
                if unk10 == 0x24:
                    dcx_type = cls.DCX_DFLT_10000_24_9
                elif unk10 == 0x44:
                    if unk04 == 0x10000:
                        dcx_type = cls.DCX_DFLT_10000_44_9
                    elif unk04 == 0x11000:
                        if unk30 == 0x8000000:
                            dcx_type = cls.DCX_DFLT_11000_44_8
                        elif unk30 == 0x9000000:
                            if unk38 == 15:
                                dcx_type = cls.DCX_DFLT_11000_44_9_15
                            elif unk38 == 0:
                                dcx_type = cls.DCX_DFLT_11000_44_9
            elif dcx_fmt == b"KRAK":  # requires `oo2core_6_win64.dll`
                dcx_type = cls.DCX_KRAK
        else:
            # No DCX magic: check for a bare zlib stream (0x78 header byte followed by
            # one of the standard zlib compression-level flag bytes).
            b0 = reader.unpack_value("B", offset=0)
            b1 = reader.unpack_value("B", offset=1)
            if b0 == 0x78 and (b1 in {0x01, 0x5E, 0x9C, 0xDA}):
                dcx_type = cls.Zlib

        reader.seek(old_offset)
        return dcx_type