def unpack(
    self,
    buffer: io.BufferedIOBase,
    unicode: bool = None,
    version: Version = None,
    gx_lists: list[GXList] = None,
    gx_list_indices: dict[int, int] = None,
):
    if any(var is None for var in (unicode, version, gx_lists, gx_list_indices)):
        raise ValueError("Not all required keywords were passed to `Material.unpack()`.")
    data = self.STRUCT.unpack(buffer)
    encoding = "utf-16-le" if unicode else "shift_jis_2004"
    self.name = read_chars_from_buffer(buffer, offset=data.pop("__name_offset"), encoding=encoding)
    self.mtd_path = read_chars_from_buffer(buffer, offset=data.pop("__mtd_path_offset"), encoding=encoding)
    gx_offset = data.pop("__gx_offset")
    if gx_offset == 0:
        self.gx_index = -1
    elif gx_offset in gx_list_indices:
        # Another material already unpacked the `GXList` at this offset; reuse its index.
        self.gx_index = gx_list_indices[gx_offset]
    else:
        self.gx_index = gx_list_indices[gx_offset] = len(gx_lists)
        material_offset = buffer.tell()
        buffer.seek(gx_offset)
        gx_lists.append(GXList(buffer, version))
        buffer.seek(material_offset)
    self.set(**data)

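# Hedged usage sketch (hypothetical driver, not library code): the same `gx_lists` and
# `gx_list_indices` objects are shared across every `Material.unpack()` call so that materials
# whose `__gx_offset` values coincide point at a single `GXList` via `gx_index`.
# `material_class`, `buffer`, `version`, and `material_count` are assumed caller-side names,
# and the material class is assumed to be constructible with no arguments.
def unpack_materials(material_class, buffer, version, material_count, unicode=True):
    gx_lists = []  # grows by one `GXList` per *unique* GX offset
    gx_list_indices = {}  # maps GX file offset -> index into `gx_lists`
    materials = []
    for _ in range(material_count):
        material = material_class()
        material.unpack(
            buffer,
            unicode=unicode,
            version=version,
            gx_lists=gx_lists,
            gx_list_indices=gx_list_indices,
        )
        materials.append(material)
    return materials, gx_lists
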
def unpack(self, buffer: io.BufferedIOBase, unicode: bool = None):
    data = self.STRUCT.unpack(buffer)
    encoding = "utf-16-le" if unicode else "shift_jis_2004"
    self.path = read_chars_from_buffer(buffer, offset=data.pop("__path_offset"), encoding=encoding)
    self.texture_type = read_chars_from_buffer(buffer, offset=data.pop("__texture_type_offset"), encoding=encoding)
    self.set(**data)

def unpack(self, msb_buffer):
    model_offset = msb_buffer.tell()
    model_data = self.MODEL_STRUCT.unpack(msb_buffer)
    self.name = read_chars_from_buffer(
        msb_buffer, offset=model_offset + model_data.name_offset, encoding='shift-jis'
    )
    self.sib_path = read_chars_from_buffer(
        msb_buffer, offset=model_offset + model_data.sib_path_offset, encoding='shift-jis'
    )
    self.ENTRY_TYPE = MSB_MODEL_TYPE(model_data.model_type)
    self._model_type_index = model_data.model_type_index
    self._instance_count = model_data.instance_count

def unpack(self, msb_buffer):
    part_offset = msb_buffer.tell()
    header = self.PART_HEADER_STRUCT.unpack(msb_buffer)
    if header.part_type != self.ENTRY_TYPE:
        raise ValueError(
            f"Unexpected part type enum {header.part_type} for class {self.__class__.__name__}."
        )
    self._model_index = header.model_index
    self._part_type_index = header.part_type_index
    for transform in ('translate', 'rotate', 'scale'):
        setattr(self, transform, Vector(getattr(header, transform)))
    self.draw_groups = _flag_group_to_enabled_flags(header.draw_groups)
    self.display_groups = _flag_group_to_enabled_flags(header.display_groups)
    self.name = read_chars_from_buffer(
        msb_buffer, offset=part_offset + header.name_offset, encoding='shift-jis'
    )
    self.sib_path = read_chars_from_buffer(
        msb_buffer, offset=part_offset + header.sib_path_offset, encoding='shift-jis'
    )
    msb_buffer.seek(part_offset + header.base_data_offset)
    base_data = self.PART_BASE_DATA_STRUCT.unpack(msb_buffer)
    self.entity_id = base_data.entity_id
    self.ambient_light_id = base_data.ambient_light_id
    self.fog_id = base_data.fog_id
    self.scattered_light_id = base_data.scattered_light_id
    self.lens_flare_id = base_data.lens_flare_id
    self.shadow_id = base_data.shadow_id
    self.dof_id = base_data.dof_id
    self.tone_map_id = base_data.tone_map_id
    self.tone_correct_id = base_data.tone_correct_id
    self.point_light_id = base_data.point_light_id
    self.lod_param_id = base_data.lod_param_id
    self.is_shadow_source = base_data.is_shadow_source
    self.is_shadow_destination = base_data.is_shadow_destination
    self.is_shadow_only = base_data.is_shadow_only
    self.draw_by_reflect_cam = base_data.draw_by_reflect_cam
    self.draw_only_reflect_cam = base_data.draw_only_reflect_cam
    self.use_depth_bias_float = base_data.use_depth_bias_float
    self.disable_point_light_effect = base_data.disable_point_light_effect
    msb_buffer.seek(part_offset + header.type_data_offset)
    self.unpack_type_data(msb_buffer)

def unpack(cls, esd_buffer, condition_pointers_offset, count=1):
    """Returns a list of `Condition` instances."""
    conditions = []
    if condition_pointers_offset == -1:
        return conditions
    pointers = cls.POINTER_STRUCT.unpack_count(esd_buffer, count=count, offset=condition_pointers_offset)
    for p in pointers:
        d = cls.STRUCT.unpack(esd_buffer, offset=p["condition_offset"])
        pass_commands = cls.Command.unpack(
            esd_buffer,
            d["pass_commands_offset"],
            count=d["pass_commands_count"],
        )
        subconditions = cls.unpack(  # safe recursion
            esd_buffer,
            d["subcondition_pointers_offset"],
            count=d["subcondition_pointers_count"],
        )
        test_ezl = read_chars_from_buffer(esd_buffer, offset=d["test_ezl_offset"], length=d["test_ezl_size"])
        if d["next_state_offset"] > 0:
            next_state_index = cls.STATE_ID_STRUCT.unpack(esd_buffer, offset=d["next_state_offset"])["state_id"]
        else:
            next_state_index = -1
        conditions.append(cls(next_state_index, test_ezl, pass_commands, subconditions))
    return conditions

def unpack(self, buffer: io.BufferedIOBase, unicode=None):
    data = self.STRUCT.unpack(buffer)
    encoding = "utf-16-le" if unicode else "shift_jis_2004"
    self.name = read_chars_from_buffer(buffer, offset=data.pop("__name_offset"), encoding=encoding)
    self.set(**data)

def unpack(self, msb_buffer):
    model_offset = msb_buffer.tell()
    model_data = self.MODEL_STRUCT.unpack(msb_buffer)
    self.name = read_chars_from_buffer(
        msb_buffer, offset=model_offset + model_data["__name_offset"], encoding="shift-jis"
    )
    self.sib_path = read_chars_from_buffer(
        msb_buffer, offset=model_offset + model_data["__sib_path_offset"], encoding="shift-jis"
    )
    try:
        self.ENTRY_SUBTYPE = MSBModelSubtype(model_data["__model_type"])
    except ValueError:  # invalid enum values raise `ValueError`
        raise ValueError(f"Unrecognized MSB model type: {model_data['__model_type']}")
    self.set(**model_data)

def unpack_strings(self):
    strings = []
    string_buffer = BytesIO(self.packed_strings)
    while string_buffer.tell() != len(self.packed_strings):
        offset = string_buffer.tell()
        string = read_chars_from_buffer(string_buffer, reset_old_offset=False, encoding=self.STRING_ENCODING)
        strings.append((str(offset), string))  # repr to include double backslash
    return strings

def unpack_goal(self, info_buffer, goal_struct) -> LuaGoal:
    goal = goal_struct.unpack(info_buffer)
    name = read_chars_from_buffer(info_buffer, offset=goal.name_offset, encoding=self.encoding)
    if goal.logic_interrupt_name_offset > 0:
        logic_interrupt_name = read_chars_from_buffer(
            info_buffer, offset=goal.logic_interrupt_name_offset, encoding=self.encoding
        )
    else:
        logic_interrupt_name = ""
    return LuaGoal(
        goal_id=goal.goal_id,
        goal_name=name,
        has_battle_interrupt=goal.has_battle_interrupt,
        has_logic_interrupt=goal.has_logic_interrupt,
        logic_interrupt_name=logic_interrupt_name,
    )

def unpack(self, msb_buffer):
    event_offset = msb_buffer.tell()
    header = self.EVENT_HEADER_STRUCT.unpack(msb_buffer)
    if header["__event_type"] != self.ENTRY_SUBTYPE:
        raise ValueError(
            f"Unexpected MSB event type value {header['__event_type']} for {self.__class__.__name__}."
        )
    msb_buffer.seek(event_offset + header["__base_data_offset"])
    base_data = self.EVENT_BASE_DATA_STRUCT.unpack(msb_buffer)
    name_offset = event_offset + header["__name_offset"]
    self.name = read_chars_from_buffer(msb_buffer, offset=name_offset, encoding=self.NAME_ENCODING)
    self.set(**header)
    self.set(**base_data)
    msb_buffer.seek(event_offset + header["__type_data_offset"])
    self.unpack_type_data(msb_buffer)

def unpack_fields(
    cls,
    param_name: str,
    paramdef_buffer: io.BytesIO,
    field_count: int,
    format_version: int,
    unicode: bool,
    byte_order: str,
) -> dict[str, ParamDefField]:
    """Buffer should be at the start of the packed fields (which are followed by the packed descriptions)."""
    field_structs = cls.GET_FIELD_STRUCT(format_version, unicode, byte_order).unpack_count(
        paramdef_buffer, count=field_count
    )
    fields = {}
    for field_index, field_struct in enumerate(field_structs):
        if field_struct["description_offset"] != 0:
            field_description = read_chars_from_buffer(
                paramdef_buffer,
                offset=field_struct["description_offset"],
                encoding="utf-16-le" if unicode else "shift_jis_2004",
            )
        else:
            field_description = ""
        if "display_name_offset" in field_struct:
            display_name = read_chars_from_buffer(
                paramdef_buffer,
                offset=field_struct["display_name_offset"],
                encoding="utf-16-le",
            )
        else:
            display_name = field_struct["display_name"]
        field = cls(field_struct, field_index, field_description, param_name, display_name=display_name)
        fields[field.name] = field
    return fields

def unpack(self, msb_buffer):
    part_offset = msb_buffer.tell()
    header = self.PART_HEADER_STRUCT.unpack(msb_buffer)
    if header["__part_type"] != self.ENTRY_SUBTYPE:
        raise ValueError(
            f"Unexpected part type enum {header['__part_type']} for class {self.__class__.__name__}."
        )
    self._model_index = header["_model_index"]
    self._part_type_index = header["_part_type_index"]
    for transform in ("translate", "rotate", "scale"):
        setattr(self, transform, Vector3(getattr(header, transform)))
    self._draw_groups = int_group_to_bit_set(header["__draw_groups"], assert_size=4)
    self._display_groups = int_group_to_bit_set(header["__display_groups"], assert_size=4)
    self.name = read_chars_from_buffer(
        msb_buffer, offset=part_offset + header["__name_offset"], encoding=self.NAME_ENCODING
    )
    self.sib_path = read_chars_from_buffer(
        msb_buffer, offset=part_offset + header["__sib_path_offset"], encoding=self.NAME_ENCODING
    )
    msb_buffer.seek(part_offset + header["__base_data_offset"])
    base_data = self.PART_BASE_DATA_STRUCT.unpack(msb_buffer)
    self.set(**base_data)
    msb_buffer.seek(part_offset + header["__type_data_offset"])
    self.unpack_type_data(msb_buffer)

def unpack(self, gnl_buffer):
    self.big_endian, self.use_struct_64 = self._check_big_endian_and_struct_64(gnl_buffer)
    fmt = f"{'>' if self.big_endian else '<'}{'q' if self.use_struct_64 else 'i'}"
    read_size = struct.calcsize(fmt)
    self.names = []
    offset = None
    # The offset table is terminated by a zero offset; each nonzero offset points to a name string.
    while offset != 0:
        (offset,) = struct.unpack(fmt, gnl_buffer.read(read_size))
        if offset != 0:
            self.names.append(read_chars_from_buffer(gnl_buffer, offset=offset, encoding=self.encoding))

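# Hedged sketch (hypothetical inverse of the unpack above; the on-disk layout is assumed):
# packs a zero-terminated offset table followed immediately by the encoded, null-terminated
# names it points to. The reader above only requires the zero terminator and valid offsets,
# not this exact layout.
import struct


def pack_name_offset_list(names, big_endian=False, use_struct_64=False, encoding="utf-8"):
    fmt = f"{'>' if big_endian else '<'}{'q' if use_struct_64 else 'i'}"
    offset_size = struct.calcsize(fmt)
    terminator = b"\0\0" if "16" in encoding else b"\0"
    encoded_names = [name.encode(encoding) + terminator for name in names]
    table_size = (len(names) + 1) * offset_size  # one offset per name plus the terminating zero
    offsets = []
    position = table_size
    for data in encoded_names:
        offsets.append(position)
        position += len(data)
    packed = b"".join(struct.pack(fmt, o) for o in offsets)
    packed += struct.pack(fmt, 0)  # zero offset terminates the list for the reader above
    return packed + b"".join(encoded_names)
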
def unpack(self, msb_buffer):
    event_offset = msb_buffer.tell()
    header = self.EVENT_HEADER_STRUCT.unpack(msb_buffer)
    if header.event_type != self.ENTRY_TYPE:
        raise ValueError(
            f"Unexpected event type enum {header.event_type} for class {self.__class__.__name__}."
        )
    msb_buffer.seek(event_offset + header.base_data_offset)
    base_data = self.EVENT_BASE_DATA_STRUCT.unpack(msb_buffer)
    self.name = read_chars_from_buffer(
        msb_buffer, offset=event_offset + header.name_offset, encoding='shift-jis'
    )
    self._event_index = header.event_index
    self._local_event_index = header.local_event_index
    self._base_part_index = base_data.part_index
    self._base_region_index = base_data.region_index
    self.entity_id = base_data.entity_id
    msb_buffer.seek(event_offset + header.type_data_offset)
    self.unpack_type_data(msb_buffer)

def unpack(self, msb_buffer):
    region_offset = msb_buffer.tell()
    base_data = self.REGION_STRUCT.unpack(msb_buffer)
    self.name = read_chars_from_buffer(
        msb_buffer, offset=region_offset + base_data.name_offset, encoding='shift-jis'
    )
    self._region_index = base_data.region_index
    self.translate = Vector(base_data.translate)
    self.rotate = Vector(base_data.rotate)
    self.check_null_field(msb_buffer, region_offset + base_data.unknown_offset_1)
    self.check_null_field(msb_buffer, region_offset + base_data.unknown_offset_2)
    if base_data.type_data_offset != 0:
        msb_buffer.seek(region_offset + base_data.type_data_offset)
        self.unpack_type_data(msb_buffer)
    msb_buffer.seek(region_offset + base_data.entity_id_offset)
    self.entity_id = struct.unpack('i', msb_buffer.read(4))[0]
    return region_offset + base_data.entity_id_offset

def unpack(self, msb_buffer):
    region_offset = msb_buffer.tell()
    base_data = self.REGION_STRUCT.unpack(msb_buffer)
    self.name = read_chars_from_buffer(
        msb_buffer, offset=region_offset + base_data["name_offset"], encoding="shift-jis"
    )
    self._region_index = base_data["region_index"]
    self.translate = Vector3(base_data["translate"])
    self.rotate = Vector3(base_data["rotate"])
    self.check_null_field(msb_buffer, region_offset + base_data["unknown_offset_1"])
    self.check_null_field(msb_buffer, region_offset + base_data["unknown_offset_2"])
    if base_data["type_data_offset"] != 0:
        msb_buffer.seek(region_offset + base_data["type_data_offset"])
        self.unpack_type_data(msb_buffer)
    msb_buffer.seek(region_offset + base_data["entity_id_offset"])
    self.entity_id = struct.unpack("i", msb_buffer.read(4))[0]
    return region_offset + base_data["entity_id_offset"]

def unpack(self, msb_buffer):
    header = self.MAP_ENTITY_LIST_HEADER.unpack(msb_buffer)
    entry_offsets = [
        self.MAP_ENTITY_ENTRY_OFFSET.unpack(msb_buffer)["entry_offset"]
        for _ in range(header["entry_offset_count"] - 1)
    ]
    next_entry_list_offset = self.MAP_ENTITY_LIST_TAIL.unpack(msb_buffer)["next_entry_list_offset"]
    self.name = read_chars_from_buffer(msb_buffer, header["name_offset"], encoding="utf-8")
    self._entries = []
    for entry_offset in entry_offsets:
        msb_buffer.seek(entry_offset)
        entry = self.ENTRY_CLASS(msb_buffer)
        self._entries.append(entry)
    msb_buffer.seek(next_entry_list_offset)

def unpack(cls, esd_buffer, commands_offset, count=1):
    """Returns a list of `Command` instances."""
    commands = []
    if commands_offset == -1:
        return commands
    struct_dicts = cls.STRUCT.unpack_count(esd_buffer, count=count, offset=commands_offset)
    for d in struct_dicts:
        if d["args_offset"] > 0:
            esd_buffer.seek(d["args_offset"])
            arg_structs = cls.ARG_STRUCT.unpack_count(esd_buffer, count=d["args_count"])
            args = [
                read_chars_from_buffer(esd_buffer, offset=a["arg_ezl_offset"], length=a["arg_ezl_size"])
                for a in arg_structs
            ]
        else:
            args = []
        commands.append(cls(d["bank"], d["index"], args))
    return commands

def unpack(self, paramdef_buffer, **kwargs):
    """Convert a paramdef file to a dictionary, indexed by ID."""
    header = self.HEADER_STRUCT.unpack(paramdef_buffer)
    if "param_name" in header:
        self.param_type = header["param_name"]
    else:
        self.param_type = read_chars_from_buffer(
            paramdef_buffer,
            offset=header["param_name_offset"],
            encoding="shift_jis_2004",  # never unicode
        )
    self.data_version = header["data_version"]
    self.format_version = header["format_version"]
    self.unicode = header["unicode"]
    self.fields = self.FIELD_CLASS.unpack_fields(
        self.param_type,
        paramdef_buffer,
        header["field_count"],
        self.format_version,
        self.unicode,
        self.BYTE_ORDER,
    )

def unpack(self, buffer, **kwargs):
    self.byte_order = ">" if unpack_from_buffer(buffer, "B", 44)[0] == 255 else "<"
    version_info = unpack_from_buffer(buffer, f"{self.byte_order}bbb", 45)
    self.flags1 = ParamFlags1(version_info[0])
    self.flags2 = ParamFlags2(version_info[1])
    self.paramdef_format_version = version_info[2]
    header_struct = self.GET_HEADER_STRUCT(self.flags1, self.byte_order)
    header = header_struct.unpack(buffer)
    try:
        self.param_type = header["param_type"]
    except KeyError:
        self.param_type = read_chars_from_buffer(buffer, offset=header["param_type_offset"], encoding="utf-8")
    self.paramdef_data_version = header["paramdef_data_version"]
    self.unknown = header["unknown"]
    # Row data offset in header not used. (It's an unsigned short, yet doesn't limit row count to 5461.)
    name_data_offset = header["name_data_offset"]  # CANNOT BE TRUSTED IN VANILLA FILES! Off by +12 bytes.

    # Load row pointer data.
    if self.flags1.LongDataOffset:
        row_pointers = self.ROW_STRUCT_64.unpack_count(buffer, count=header["row_count"])
    else:
        row_pointers = self.ROW_STRUCT_32.unpack_count(buffer, count=header["row_count"])
    row_data_offset = buffer.tell()  # Reliable row data offset.

    # Row size is lazily determined. TODO: Unpack row data in sequence and associate with names separately.
    if len(row_pointers) == 0:
        return
    elif len(row_pointers) == 1:
        # NOTE: The only vanilla param in Dark Souls with one row is LEVELSYNC_PARAM_ST (Remastered only),
        # for which the row size is hard-coded here. Otherwise, we can trust the repacked offset from
        # Soulstruct (and SoulsFormats, etc.).
        if self.param_type == "LEVELSYNC_PARAM_ST":
            row_size = 220
        else:
            row_size = name_data_offset - row_data_offset
    else:
        row_size = row_pointers[1]["data_offset"] - row_pointers[0]["data_offset"]

    # Note that we no longer need to track buffer offset.
    name_encoding = self.get_name_encoding()
    for row_struct in row_pointers:
        buffer.seek(row_struct["data_offset"])
        row_data = buffer.read(row_size)
        if row_struct["name_offset"] != 0:
            try:
                name = read_chars_from_buffer(
                    buffer,
                    offset=row_struct["name_offset"],
                    encoding=name_encoding,
                    reset_old_offset=False,  # no need to reset
                )
            except UnicodeDecodeError as ex:
                if ex.object in self.undecodable_row_names:
                    name = read_chars_from_buffer(
                        buffer,
                        offset=row_struct["name_offset"],
                        encoding=None,
                        reset_old_offset=False,  # no need to reset
                    )
                else:
                    raise
            except ValueError:
                buffer.seek(row_struct["name_offset"])
                _LOGGER.error(
                    f"Error encountered while parsing row name string in {self.param_type}.\n"
                    f"  Header: {header}\n"
                    f"  Row Struct: {row_struct}\n"
                    f"  30 chars of name data: {' '.join(f'{x:02x}' for x in buffer.read(30))}"
                )
                raise
        else:
            name = ""
        self.rows[row_struct["id"]] = ParamRow(row_data, self.paramdef, name=name)

def get_linked_file_names(self):
    names = []
    for offset in self.linked_file_offsets:
        names.append(read_chars_from_buffer(self.packed_strings, offset=offset))
    return names

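# Hedged sketch (assumption, not the library's implementation): a helper compatible with the
# `read_chars_from_buffer(...)` calls above, inferred only from how it is used in this file.
# Assumed behaviour: null-terminated read unless `length` is given; `encoding=None` returns raw
# bytes; `reset_old_offset=True` restores the caller's position; bytes input is wrapped in BytesIO.
import io


def read_chars_from_buffer(buffer, offset=None, length=None, encoding=None, reset_old_offset=True):
    if isinstance(buffer, (bytes, bytearray)):
        buffer = io.BytesIO(buffer)
    old_offset = buffer.tell()
    if offset is not None:
        buffer.seek(offset)
    if length is not None:
        raw = buffer.read(length)
    else:
        # Read until a null terminator (two bytes per character for UTF-16 encodings).
        char_size = 2 if encoding and "16" in encoding else 1
        chunks = []
        while True:
            chunk = buffer.read(char_size)
            if len(chunk) < char_size:
                raise ValueError("Null terminator not found before end of buffer.")
            if chunk == b"\0" * char_size:
                break
            chunks.append(chunk)
        raw = b"".join(chunks)
    if reset_old_offset:
        buffer.seek(old_offset)
    return raw.decode(encoding) if encoding else raw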