def create(self):
    """Build the matcol sized-str entry and its fragments from the source XML.

    Layout written (as far as this code shows): a root struct with two leading
    pointers and a layer count, a flags array (one <6I packed entry per layer),
    a shader name string, and per-layer optional fgm/name string fragments.
    """
    xml = self.load_xml(self.file_entry.path)
    self.sized_str_entry = self.create_ss_entry(self.file_entry)
    f0, f1 = self.create_fragments(self.sized_str_entry, 2)
    # first write the array: one 24-byte record per layer, only the flag is filled
    data = b""
    for layer in xml:
        layer_data = struct.pack("<6I", int(layer.attrib["flag"]), 0, 0, 0, 0, 0)
        data += layer_data
    self.write_to_pool(f1.pointers[1], 4, data)
    # ptr to array
    # 2 ptrs at start of struct; the third Q holds the layer count
    self.write_to_pool(self.sized_str_entry.pointers[0], 4, struct.pack("<2Q 2Q", 0, 0, len(xml), 0))
    self.ptr_relative(f0.pointers[0], self.sized_str_entry.pointers[0])
    self.ptr_relative(f1.pointers[0], self.sized_str_entry.pointers[0], rel_offset=8)
    # first entry to name buffer: shader identifier string
    self.write_to_pool(f0.pointers[1], 2, self.get_shader(xml))
    # write the layers; each layer record is 24 bytes wide in the array above
    offset = 0
    for layer in xml:
        # fgms go first if they exist
        if "fgm" in layer.attrib:
            fgm_frag = self.create_fragments(self.sized_str_entry, 1)[0]
            fgm_name = layer.attrib["fgm"]
            # fgm ptr sits 8 bytes into the layer's array record — presumably after the flag dword pair; TODO confirm
            self.ptr_relative(fgm_frag.pointers[0], f1.pointers[1], rel_offset=offset+8)
            self.write_to_pool(fgm_frag.pointers[1], 2, as_bytes(fgm_name))
        if "name" in layer.attrib:
            name = layer.attrib["name"]
            n_frag = self.create_fragments(self.sized_str_entry, 1)[0]
            # name ptr sits 16 bytes into the layer's array record
            self.ptr_relative(n_frag.pointers[0], f1.pointers[1], rel_offset=offset+16)
            self.write_to_pool(n_frag.pointers[1], 2, as_bytes(name))
        offset += 24
def extract(self, out_dir, show_temp_files, progress_callback):
    """Dump this materialcollection entry to a standalone .matcol file.

    Writes a custom MATC header followed by the raw pointer datas of the
    sized-str entry, its texture fragments and its material structs, then
    round-trips the result through MatcolFile as a sanity check.

    :param out_dir: callable mapping a file name to an output path
    :param show_temp_files: unused here; part of the common extract signature
    :param progress_callback: unused here; part of the common extract signature
    :return: 1-tuple with the written file path
    """
    name = self.sized_str_entry.name.replace("materialcollection", "matcol")
    logging.info(f"Writing {name}")
    out_path = out_dir(name)
    with open(out_path, 'wb') as outfile:
        # write custom matcol header
        outfile.write(self.pack_header(b"MATC"))
        outfile.write(self.sized_str_entry.pointers[0].data)
        outfile.write(self.sized_str_entry.f0.pointers[1].data)
        # these are just 8 * 00 for each ptr
        # outfile.write(self.sized_str_entry.tex_pointer.pointers[1].data)
        for tex in self.sized_str_entry.tex_frags:
            outfile.write(tex.pointers[1].data)
        # outfile.write(self.sized_str_entry.mat_pointer.pointers[1].data)
        for frags, data in self.sized_str_entry.mat_structs:
            # write root frag, always present
            m0 = frags[0]
            # counts
            outfile.write(as_bytes(data))
            # the name of the material slot or variant
            outfile.write(m0.pointers[1].data)
            # material layers only: write info and attrib frags + children
            for f in frags[1:]:
                outfile.write(f.pointers[1].data)
                for c in f.children:
                    outfile.write(as_bytes(self.p1_ztsr(c)))
    # round-trip as a sanity check that the written file parses
    m = MatcolFile()
    m.load(out_path)
    # fixed: was a bare debug print(m); use the logging module like the rest of this file
    logging.debug(m)
    return out_path,
def _get_data(self, file_path):
    """Parse the tex file at *file_path* and return its serialized parts.

    :return: tuple of (sized-str bytes, frag_01 bytes, frag_11 bytes with
        trailing padding appended, list of raw buffers)
    """
    tex = TexFile(self.ovl.context)
    tex.load(file_path)
    return (
        as_bytes(tex.tex_info),
        as_bytes(tex.frag_01),
        as_bytes(tex.frag_11) + as_bytes(tex.padding),
        tex.buffers,
    )
def load(self, ms2_file_path):
    """Inject an external .ms2 file into this OVL's ms2 sized-str entry.

    Validates that materials referenced by the new ms2 exist in the OVL and
    that each mdl2 keeps its mesh count, then overwrites the per-mdl2 fragment
    datas, the ms2 sized-str data, the buffer/model info fragments and the
    data entry buffers.

    :param ms2_file_path: path of the ms2 file to inject
    :raises AttributeError: if a mdl2's mesh count doesn't match the OVL entry
    """
    logging.info("Injecting MS2")
    versions = get_versions(self.ovl)
    ms2_file = Ms2File()
    ms2_file.load(ms2_file_path, read_bytes=True)
    missing_materials = set()
    for model_info, mdl2_name, mdl2_entry in zip(ms2_file.model_infos, ms2_file.mdl_2_names, self.sized_str_entry.children):
        for material in model_info.model.materials:
            fgm_name = f"{material.name.lower()}.fgm"
            # fixed precedence bug: without the parentheses, `or` bound looser than `and`,
            # so ALL materials were skipped on JWE1 instead of just airliftstraps.fgm
            if (ovl_versions.is_jwe(self.ovl) or ovl_versions.is_jwe2(self.ovl)) and fgm_name == "airliftstraps.fgm":
                # don't cry about this
                continue
            if fgm_name not in self.ovl._ss_dict:
                missing_materials.add(fgm_name)
        if len(mdl2_entry.model_data_frags) != len(model_info.model.meshes):
            raise AttributeError(
                f"{mdl2_entry.name} ({len(model_info.model.meshes)}) doesn't have the "
                f"expected amount ({len(mdl2_entry.model_data_frags)}) of meshes!")
    if missing_materials:
        mats = '\n'.join(missing_materials)
        msg = f"The following materials are used by {self.file_entry.name}, but are missing from the OVL:\n" \
              f"{mats}\n" \
              f"This will crash unless you are importing the materials from another OVL. Inject anyway?"
        if not interaction.showdialog(msg, ask=True):
            logging.info("Injection was canceled by the user")
            return
    for mdl2_entry, model_info in zip(self.sized_str_entry.children, ms2_file.model_infos):
        logging.debug(f"Injecting {mdl2_entry.name} ")
        materials, lods, objects, meshes, model_info_ptr = mdl2_entry.fragments
        for frag, mdl2_list in (
                (materials, model_info.model.materials,),
                (lods, model_info.model.lods),
                (objects, model_info.model.objects),
                (meshes, model_info.model.meshes)):
            if len(mdl2_list) > 0:
                data = as_bytes(mdl2_list, version_info=versions)
                # objects.pointers[1] has padding in stock, apparently as each entry is 4 bytes
                logging.debug(f"Injecting mdl2 data {len(data)} into {len(frag.pointers[1].data)} ({len(frag.pointers[1].padding)})")
                # frag.pointers[1].update_data(data, pad_to=8)
                # the above breaks injecting minmi
                frag.pointers[1].update_data(data)
                logging.debug(f"Result {len(frag.pointers[1].data)} ({len(frag.pointers[1].padding)})")
    # load ms2 ss data
    self.sized_str_entry.pointers[0].update_data(as_bytes(ms2_file.info, version_info=versions))
    buffer_info_frag, model_info_frag, end_frag = self.sized_str_entry.fragments
    buffer_info_frag.pointers[1].update_data(as_bytes(ms2_file.buffer_info, version_info=versions), update_copies=True)
    model_info_frag.pointers[1].update_data(as_bytes(ms2_file.model_infos, version_info=versions))
    # update ms2 data
    self.sized_str_entry.data_entry.update_data(ms2_file.buffers)
def create(self):
    """Build a world sized-str entry from its XML definition.

    The root struct is 80 bytes (10 Q values) holding the world type and the
    asset package / prefab counts; the asset package list, lua controller
    string and prefab list each hang off it via fragments at fixed offsets.
    """
    self.sized_str_entry = self.create_ss_entry(self.file_entry)
    world = self.load_xml(self.file_entry.path)
    assetPackages = world.findall('.//AssetPackage')
    luaController = world.findall('.//LuaController')
    Prefabs = world.findall('.//Prefab')
    # struct size is 80 bytes: [type][ptr][count][luaptr][....] 80-
    ss = struct.pack("<QQQQQQQQQQ", int(world.attrib['WorldType']), 0, len(assetPackages), 0, 0, 0, 0, 0, len(Prefabs), 0)
    # room for 80 bytes
    self.write_to_pool(self.sized_str_entry.pointers[0], 2, ss)
    if len(assetPackages):
        # point the list frag to the end of the data now.
        new_frag0 = self.create_fragments(self.sized_str_entry, 1)[0]
        # list ptr lives at offset 8 of the root struct; its data starts right after the 80-byte struct
        self.ptr_relative(new_frag0.pointers[0], self.sized_str_entry.pointers[0], 8)
        self.ptr_relative(new_frag0.pointers[1], self.sized_str_entry.pointers[0], 80)
        # for each line, add the frag ptr space and create the frag ptr
        assetpkg_frags = self.create_fragments(self.sized_str_entry, len(assetPackages))
        # NOTE: the two loops are deliberately separate — all 8-byte ptr slots are
        # written to the pool first, then all strings, so the pool layout stays contiguous
        for frag in assetpkg_frags:
            self.write_to_pool(frag.pointers[0], 2, b"\x00" * 8)
        for assetpkg, frag in zip(assetPackages, assetpkg_frags):
            self.write_to_pool(frag.pointers[1], 2, as_bytes(assetpkg.text))
    if len(luaController):
        # lua controller string ptr sits at offset 24; only the first element is used
        lua_frag = self.create_fragments(self.sized_str_entry, 1)[0]
        self.ptr_relative(lua_frag.pointers[0], self.sized_str_entry.pointers[0], 24)
        self.write_to_pool(lua_frag.pointers[1], 2, as_bytes(luaController[0].text))
    if len(Prefabs):
        # for each line, add the frag ptr space and create the frag ptr
        prefab_frags = self.create_fragments(self.sized_str_entry, len(Prefabs))
        for frag in prefab_frags:
            self.write_to_pool(frag.pointers[0], 2, b"\x00" * 8)
        for prefab, frag in zip(Prefabs, prefab_frags):
            self.write_to_pool(frag.pointers[1], 2, as_bytes(prefab.text))
        # point the list frag to the end of the data now.
        new_frag1 = self.create_fragments(self.sized_str_entry, 1)[0]
        # prefab list ptr lives at offset 48 of the root struct
        self.ptr_relative(new_frag1.pointers[0], self.sized_str_entry.pointers[0], 48)
        self.ptr_relative(new_frag1.pointers[1], prefab_frags[0].pointers[0])
def create(self):
    """Build a levels sized-str entry from its XML definition.

    Writes a 40-byte array record per level (with followup/unlockable counts),
    then per level its name string and, when present, the followup and
    unlockable sub-arrays with their name strings.
    """
    xml = self._get_data(self.file_entry.path)
    self.sized_str_entry = self.create_ss_entry(self.file_entry)
    # type 4 throughout
    root_f = self.create_fragments(self.sized_str_entry, 1)[0]
    array_bytes = b""
    data = []
    for level in xml:
        # ptr to start always exists
        f = self.create_fragments(self.sized_str_entry, 1)[0]
        followups = level.find('followups')
        unlockables = level.find('unlockables')
        # 40 bytes per level record
        array_bytes += struct.pack("<5Q", 0, 0, len(followups), 0, len(unlockables))
        # only create these pointers if the arrays exist
        # we create the arrays later once we have written the main array
        # fixed: bool(Element) is deprecated in ElementTree (FutureWarning);
        # test len() explicitly — same 0/1 fragment count as before
        f_followups = self.create_fragments(self.sized_str_entry, 1 if len(followups) else 0)
        f_unlockables = self.create_fragments(self.sized_str_entry, 1 if len(unlockables) else 0)
        data.append((f, f_followups, f_unlockables, level.attrib["name"], followups, unlockables))
    # write array data
    self.write_to_pool(root_f.pointers[1], 4, array_bytes)
    # now the levels
    offset = 0
    for f, f_followup, f_unlockable, name, followups, unlockables in data:
        self.ptr_relative(f.pointers[0], root_f.pointers[1], offset)
        self.write_to_pool(f.pointers[1], 2, as_bytes(name))
        if len(followups):
            # renamed from f_followups to avoid shadowing the tuple element
            followup_frags = self.create_fragments(self.sized_str_entry, len(followups))
            for f_u, f_e in zip(followup_frags, followups):
                self.write_to_pool(f_u.pointers[0], 4, b"\x00" * 16)
                self.write_to_pool(f_u.pointers[1], 2, as_bytes(f_e.attrib["name"]))
            # make level's ptr point to start of followups region
            f_u_ptr = f_followup[0]
            self.ptr_relative(f_u_ptr.pointers[0], root_f.pointers[1], offset+8)
            self.ptr_relative(f_u_ptr.pointers[1], followup_frags[0].pointers[0])
        # point to unlockables
        if len(unlockables):
            unlockable_frags = self.create_fragments(self.sized_str_entry, len(unlockables))
            for f_u, f_e in zip(unlockable_frags, unlockables):
                self.write_to_pool(f_u.pointers[0], 4, b"\x00" * 8)
                self.write_to_pool(f_u.pointers[1], 2, as_bytes(f_e.attrib["name"]))
            # make level's ptr point to start of unlockables region
            f_u_ptr = f_unlockable[0]
            self.ptr_relative(f_u_ptr.pointers[0], root_f.pointers[1], offset+24)
            self.ptr_relative(f_u_ptr.pointers[1], unlockable_frags[0].pointers[0])
        offset += 40
    # write the basics - array count + its data
    self.write_to_pool(self.sized_str_entry.pointers[0], 4, struct.pack("<2Q", 0, len(xml)))
    self.ptr_relative(root_f.pointers[0], self.sized_str_entry.pointers[0])
def extract(self, out_dir, show_temp_files, progress_callback):
    """Write this entry's header + single buffer to disk, plus one file per
    associated banis entry.

    :param out_dir: callable mapping a file name to an output path
    :param show_temp_files: unused here; part of the common extract signature
    :param progress_callback: unused here; part of the common extract signature
    :return: list of all written file paths
    :raises AttributeError: if the data entry or its buffer layout is unexpected
    """
    name = self.sized_str_entry.name
    data_entry = self.sized_str_entry.data_entry
    if not data_entry:
        raise AttributeError(f"No data entry for {name}")
    buffers = data_entry.buffer_datas
    if len(buffers) != 1:
        raise AttributeError(f"Wrong amount of buffers for {name}")
    logging.info(f"Writing {name}")
    main_path = out_dir(name)
    written = [main_path, ]
    with open(main_path, 'wb') as stream:
        stream.write(self.sized_str_entry.pointers[0].data)
        stream.write(buffers[0])
    for bani in self.bani_files:
        bani_ss = self.ovl.get_sized_str_entry(bani.name)
        logging.info(f"Writing {bani.name}")
        first_frag = bani_ss.fragments[0]
        # write banis file
        bani_path = out_dir(bani.name)
        with open(bani_path, 'wb') as stream:
            stream.write(b"BANI")
            # store the owning banis name so the bani can find it again
            stream.write(as_bytes(name))
            stream.write(first_frag.pointers[0].data)
        written.append(bani_path)
    return written
def extract(self, out_dir, show_temp_files, progress_callback):
    """Write this ms2 sized-str entry (legacy format path) to a .ms2 file.

    Assembles: a bone-info-length header, the (truncated) sized-str data, the
    mdl2 basenames, the name buffer, the buffer info, per-mdl2 fragment datas
    (only for versions > 39), then bone infos and vertex data.

    :return: 1-tuple with the written file path
    :raises AttributeError: if a post-PC entry doesn't have exactly 3 fragments
    """
    self.get_version()
    name = self.sized_str_entry.name
    logging.info(f"Writing {name}")
    name_buffer, bone_infos, verts = self.get_ms2_buffer_datas()
    # truncate to 48 bytes for PZ af_keeperbodyparts
    ms2_general_info_data = self.sized_str_entry.pointers[0].data[:48]
    # ms2_info = self.sized_str_entry.pointers[0].load_as(Ms2SizedStrData, context=self.context)[0]
    ms2_header = struct.pack("<I", len(bone_infos))
    # for i, buffer in enumerate(buffers):
    #     p = out_dir(name+str(i)+".ms2")
    #     with open(p, 'wb') as outfile:
    #         outfile.write(buffer)
    # Planet coaster
    if self.ms2_info.version <= 39:
        # only ss entry holds any useful stuff
        ms2_buffer_info_data = b""
    # Planet Zoo, JWE
    else:
        if len(self.sized_str_entry.fragments) != 3:
            raise AttributeError(f"{name} must have 3 fragments")
        buffer_info_frag, model_info_frag, end_frag = self.sized_str_entry.fragments
        # information on vert & tri buffer sizes
        ms2_buffer_info_data = buffer_info_frag.pointers[1].data
        # this fragment informs us about the mesh count of the next mdl2 that is read
        # so we can use it to collect the variable mdl2 fragments describing a mesh each
        model_infos = model_info_frag.pointers[1].load_as(ModelInfo, context=self.context, num=len(self.sized_str_entry.children))
    # write the ms2 file
    out_path = out_dir(name)
    with open(out_path, 'wb') as outfile:
        outfile.write(ms2_header)
        outfile.write(ms2_general_info_data)
        for mdl2_entry in self.sized_str_entry.children:
            logging.debug(f"Writing {mdl2_entry.name}")
            outfile.write(as_bytes(mdl2_entry.basename))
        outfile.write(name_buffer)
        # this corresponds to pc buffer 1 already
        outfile.write(ms2_buffer_info_data)
        # export each mdl2
        if self.ms2_info.version > 39:
            outfile.write(model_info_frag.pointers[1].data)
            for mdl2_info, mdl2_entry in zip(model_infos, self.sized_str_entry.children):
                materials, lods, objects, meshes, model_info = mdl2_entry.fragments
                # avoid writing bad fragments that should be empty
                if mdl2_info.num_objects:
                    for f in (materials, lods, objects, meshes):
                        outfile.write(f.pointers[1].data)
        outfile.write(bone_infos)
        outfile.write(verts)
    # m = Ms2File()
    # m.load(out_path, read_editable=True)
    # m.save(out_path+"_.ms2")
    # print(m)
    return out_path,
def link_list_at_rel_offset(self, items_list, ref_ptr, rel_offset):
    """Links a list of pointers relative to rel_offset to the items"""
    frags = self.create_fragments(self.sized_str_entry, len(items_list))
    for slot, (item, frag) in enumerate(zip(items_list, frags)):
        # each ptr slot is 8 bytes wide, laid out consecutively after rel_offset
        self.ptr_relative(frag.pointers[0], ref_ptr, rel_offset=rel_offset + 8 * slot)
        self.write_to_pool(frag.pointers[1], 2, as_bytes(item))
def load(self, file_path):
    """Inject an external .matcol file into this sized-str entry.

    Collects pointer/name pairs from the texture list and from either the
    variant or the layered material structs, updating info/attrib fragment
    datas in place, then hands the collected pairs to update_matcol_pointers.

    :param file_path: path of the matcol file to inject
    """
    matcol_data = MatcolFile()
    matcol_data.load(file_path)
    if self.sized_str_entry.has_texture_list_frag:
        pointers = [
            tex_frag.pointers[1] for tex_frag in self.sized_str_entry.tex_frags]
        # three strings per texture wrapper, flattened in this fixed order
        new_names = [
            n for t in matcol_data.texture_wrapper.textures
            for n in (t.fgm_name, t.texture_suffix, t.texture_type)]
    else:
        pointers = []
        new_names = []
    if self.sized_str_entry.is_variant:
        for (m0, ), variant in zip(self.sized_str_entry.mat_structs, matcol_data.variant_wrapper.materials):
            # print(layer.name)
            pointers.append(m0.pointers[1])
            new_names.append(variant)
    elif self.sized_str_entry.is_layered:
        # NOTE: append order of pointers/new_names must stay in lockstep —
        # update_matcol_pointers presumably pairs them by index; do not reorder
        for (m0, info, attrib), layer in zip(self.sized_str_entry.mat_structs, matcol_data.layered_wrapper.layers):
            # print(layer.name)
            pointers.append(m0.pointers[1])
            new_names.append(layer.name)
        for frag, wrapper in zip(info.children, layer.infos):
                frag.pointers[0].update_data(as_bytes(wrapper.info), update_copies=True)
                frag.pointers[1].update_data(as_bytes(wrapper.name), update_copies=True)
                pointers.append(frag.pointers[1])
                new_names.append(wrapper.name)
        for frag, wrapper in zip(attrib.children, layer.attribs):
                frag.pointers[0].update_data(as_bytes(wrapper.attrib), update_copies=True)
                frag.pointers[1].update_data(as_bytes(wrapper.name), update_copies=True)
                pointers.append(frag.pointers[1])
                new_names.append(wrapper.name)
    self.update_matcol_pointers(pointers, new_names)
def _get_frag_datas(self, fgm_data):
    """Serialize an FgmFile into its fragment payloads.

    :param fgm_data: loaded FgmFile instance
    :return: tuple (datas, sizedstr_bytes) where datas lists the fragment
        payloads in write order — textures (if any), attributes (if any),
        then the raw data block
    """
    versions = get_versions(self.ovl)
    header = fgm_data.fgm_info
    sizedstr_bytes = as_bytes(header, version_info=versions)
    # todo - this is definitely NOT right/ needed padding by comparing to stock FGMs
    # no clue what the 'rule' here is, it may not be padding but be appear if another ptr is missing
    # textures_bytes += get_padding(len(textures_bytes), alignment=16)
    # attributes never seem to have padding
    datas = []
    if header.texture_count:
        datas.append(as_bytes(fgm_data.textures, version_info=versions))
    if header.attribute_count:
        datas.append(as_bytes(fgm_data.attributes, version_info=versions))
    datas.append(fgm_data.data_bytes)
    return datas, sizedstr_bytes
def load_fgm(ovl, fgm_file_path, fgm_sized_str_entry):
    """Inject an external .fgm file into an existing fgm sized-str entry.

    Updates the data entry buffer, the sized-str data, each fragment payload
    (the set of payloads depends on how many fragments the entry has) and the
    texture dependencies on the ovl.

    :raises AttributeError: if the entry has an unexpected fragment count
    """
    versions = get_versions(ovl)
    fgm = FgmFile()
    fgm.load(fgm_file_path)
    sizedstr_bytes = as_bytes(
        fgm.fgm_info, version_info=versions) + as_bytes(
        fgm.two_frags_pad, version_info=versions)
    # todo - move texpad into fragment padding?
    textures_bytes = as_bytes(fgm.textures, version_info=versions) + as_bytes(
        fgm.texpad, version_info=versions)
    attributes_bytes = as_bytes(fgm.attributes, version_info=versions)
    # the actual injection
    fgm_sized_str_entry.data_entry.update_data((fgm.buffer_bytes, ))
    fgm_sized_str_entry.pointers[0].update_data(sizedstr_bytes, update_copies=True)
    # payloads per fragment count: 4 = full, 3 = no zeros, 2 = variant fgm
    frag_count = len(fgm_sized_str_entry.fragments)
    if frag_count == 4:
        datas = (textures_bytes, attributes_bytes, fgm.zeros_bytes, fgm.data_bytes)
    elif frag_count == 3:
        datas = (textures_bytes, attributes_bytes, fgm.data_bytes)
    elif frag_count == 2:
        datas = (attributes_bytes, fgm.data_bytes)
    else:
        raise AttributeError("Unexpected fgm frag count")
    # inject fragment datas
    for frag, data in zip(fgm_sized_str_entry.fragments, datas):
        frag.pointers[1].update_data(data, update_copies=True)
    # update dependencies on ovl
    fgm_file_entry = get_file_entry(ovl, fgm_sized_str_entry)
    for dep_entry, tex_name in zip(fgm_file_entry.dependencies, fgm.texture_names):
        dep_entry.basename = tex_name
        dep_entry.name = dep_entry.basename + dep_entry.ext.replace(":", ".")
        dep_entry.file_hash = djb(tex_name.lower())
def create(self):
    """Build a specdef sized-str entry from its XML definition.

    Note: this version of create ignores specdef attributes — only the Name
    list and the Flags attribute are taken from the xml; all other counts are
    written as zero.
    """
    self.sized_str_entry = self.create_ss_entry(self.file_entry)
    specdef = self.load_xml(self.file_entry.path)
    names = [elem.text for elem in specdef.findall('.//Name')]
    # header: attrib count, flags, then the four byte-sized counts, then 7 empty Qs
    ss = struct.pack(
        "<2H4B 7Q",
        0, int(specdef.attrib['Flags']),
        len(names), 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0)
    self.write_to_pool(self.sized_str_entry.pointers[0], 2, ss)
    # need to write 3 frags always
    frags = self.create_fragments(self.sized_str_entry, 3)
    self.write_to_pool(frags[0].pointers[0], 2, b"\x00" * 8)
    # ptr0 slots at offsets 8, 16, 24; all ptr1s target offset 64
    for slot, frag in enumerate(frags):
        self.ptr_relative(frag.pointers[0], self.sized_str_entry.pointers[0], 8 * (slot + 1))
    for frag in frags:
        self.ptr_relative(frag.pointers[1], self.sized_str_entry.pointers[0], 64)
    if names:
        # write features, name count
        root_f = self.create_fragments(self.sized_str_entry, 1)[0]
        self.sized_str_entry.vars = self.create_fragments(self.sized_str_entry, len(names))
        # write the options
        for option, frag in zip(names, self.sized_str_entry.vars):
            self.write_to_pool(frag.pointers[1], 2, as_bytes(option))
        # apparently no padding
        # self.sized_str_entry.vars[-1].pointers[1].pool.pad(alignment=4)
        for frag in self.sized_str_entry.vars:
            self.write_to_pool(frag.pointers[0], 4, b"\x00" * 8)
        self.ptr_relative(root_f.pointers[0], self.sized_str_entry.pointers[0], 32)
        # point to start of options array
        self.ptr_relative(root_f.pointers[1], self.sized_str_entry.vars[0].pointers[0])
def write_list_at_rel_offset(self, items_list, ref_ptr, rel_offset):
    """Writes a list of pointers and items, and reference it from a ptr at rel_offset from the ref_ptr"""
    if not items_list:
        return
    # for each line, add the frag ptr space and create the frag ptr
    item_frags = self.create_fragments(self.sized_str_entry, len(items_list))
    # ptr slots first, then the item payloads, to keep the pool layout contiguous
    for frag in item_frags:
        self.write_to_pool(frag.pointers[0], 2, b"\x00" * 8)
    for item, frag in zip(items_list, item_frags):
        self.write_to_pool(frag.pointers[1], 2, as_bytes(item))
    # point the list frag to the end of the data now.
    list_frag = self.create_fragments(self.sized_str_entry, 1)[0]
    self.ptr_relative(list_frag.pointers[0], ref_ptr, rel_offset)
    self.ptr_relative(list_frag.pointers[1], item_frags[0].pointers[0])
def create(self):
    """Build a line-list sized-str entry from a utf-8 text file.

    Each line becomes one string fragment; the root struct stores the line
    count followed by a pointer to the start of the options array.
    """
    raw = self.get_content(self.file_entry.path)
    lines = raw.decode('utf-8').splitlines()
    self.sized_str_entry = self.create_ss_entry(self.file_entry)
    list_frag = self.create_fragments(self.sized_str_entry, 1)[0]
    self.sized_str_entry.vars = self.create_fragments(self.sized_str_entry, len(lines))
    # write the options
    for option, frag in zip(lines, self.sized_str_entry.vars):
        self.write_to_pool(frag.pointers[1], 2, as_bytes(option))
    # apparently no padding
    # self.sized_str_entry.vars[-1].pointers[1].pool.pad(alignment=4)
    for frag in self.sized_str_entry.vars:
        self.write_to_pool(frag.pointers[0], 4, b"\x00" * 8)
    # root struct: count, then room for the list ptr
    self.write_to_pool(self.sized_str_entry.pointers[0], 4, struct.pack("<Q Q", len(lines), 0))
    self.ptr_relative(list_frag.pointers[0], self.sized_str_entry.pointers[0], rel_offset=8)
    # point to start of options array
    self.ptr_relative(list_frag.pointers[1], self.sized_str_entry.vars[0].pointers[0])
def create(self):
    """Build a ui movie sized-str entry from its XML definition.

    Fills a UiMovieHeader with the flags, floats and list counts from the xml,
    writes it, then attaches the name block and each string/int list at its
    fixed offset within the header struct.
    """
    self.sized_str_entry = self.create_ss_entry(self.file_entry)
    moviedef = self.load_xml(self.file_entry.path)
    # removed: a `names` list built from './/Name' that was never used
    self.ui_names = [data.text for data in moviedef.findall('.//Control')]
    self.assetpkgs = [data.text for data in moviedef.findall('.//AssetPackage')]
    self.ui_triggers = [data.text for data in moviedef.findall('.//UITrigger')]
    self.ui_interfaces = [data.text for data in moviedef.findall('.//Interface')]
    self.Count1List = [int(data.text) for data in moviedef.findall('.//List1')]
    self.Count2List = [int(data.text) for data in moviedef.findall('.//List2')]
    self.header = UiMovieHeader(self.ovl.context)
    self.header.flag_1 = int(moviedef.attrib['flags1'])
    self.header.flag_2 = int(moviedef.attrib['flags2'])
    self.header.flag_3 = int(moviedef.attrib['flags3'])
    self.header.floats[0] = float(moviedef.attrib['float1'])
    self.header.floats[1] = float(moviedef.attrib['float2'])
    self.header.floats[2] = float(moviedef.attrib['float3'])
    self.header.num_ui_triggers = len(self.ui_triggers)
    # fixed: num_ui_names was assigned twice; kept a single assignment
    self.header.num_ui_names = len(self.ui_names)
    self.header.num_assetpkgs = len(self.assetpkgs)
    self.header.num_list_1 = len(self.Count1List)
    self.header.num_list_2 = len(self.Count2List)
    self.header.num_ui_interfaces = len(self.ui_interfaces)
    self.write_to_pool(self.sized_str_entry.pointers[0], 2, as_bytes(self.header))
    # main names list
    data = (moviedef.attrib['MovieName'], moviedef.attrib['PkgName'],
            moviedef.attrib['CategoryName'], moviedef.attrib['TypeName'])
    self.link_list_at_rel_offset(data, self.sized_str_entry.pointers[0], 0)
    # Up to here should be enough to build almost any movie without list
    # time now to attach all the lists
    self.write_list_at_rel_offset(self.ui_triggers, self.sized_str_entry.pointers[0], 72)
    self.write_list_at_rel_offset(self.ui_names, self.sized_str_entry.pointers[0], 88)
    self.write_list_at_rel_offset(self.assetpkgs, self.sized_str_entry.pointers[0], 96)
    # the two int lists shared duplicated packing code; factored into a helper
    self._write_int_list(self.Count1List, 112)
    self._write_int_list(self.Count2List, 120)
    self.write_list_at_rel_offset(self.ui_interfaces, self.sized_str_entry.pointers[0], 128)

def _write_int_list(self, values, rel_offset):
    """Write *values* as little-endian uint32s (zero-padded to at least 4
    entries) into the pool and point a frag at *rel_offset* of the root
    struct to them. Does nothing for an empty list."""
    if not values:
        return
    frag = self.create_fragments(self.sized_str_entry, 1)[0]
    self.ptr_relative(frag.pointers[0], self.sized_str_entry.pointers[0], rel_offset)
    itembytes = b"".join(struct.pack("<I", int(v)) for v in values)
    if len(values) < 4:
        # same zero padding as struct.pack(f"<{n}s", b'') produced before
        itembytes += b"\x00" * (4 * (4 - len(values)))
    self.write_to_pool(frag.pointers[1], 2, itembytes)
def create(self, ovs, file_entry):
    """Build an ms2 sized-str entry (legacy pool-stream path) plus one mdl2
    entry per sibling .mdl2 file that references this ms2.

    All payloads are written sequentially into one pool via pool.data.write;
    pointer targets are captured with pool.data.tell() immediately before each
    write, so the statement order here IS the on-disk layout — do not reorder.
    """
    self.ovs = ovs
    self.ovl = ovs.ovl
    ms2_file = Ms2File()
    ms2_file.load(file_entry.path, read_bytes=True)
    ms2_entry = self.create_ss_entry(file_entry)
    ms2_entry.children = []
    versions = get_versions(ovs.ovl)
    pool_index, pool = self.get_pool(2)
    offset = pool.data.tell()
    ms2_dir, ms2_basename = os.path.split(file_entry.path)
    mdl2_names = [f for f in os.listdir(ms2_dir) if f.lower().endswith(".mdl2")]
    mdl2s = []
    for mdl2_name in mdl2_names:
        mdl2_path = os.path.join(ms2_dir, mdl2_name)
        mdl2 = Mdl2File()
        mdl2.load(mdl2_path)
        # only keep mdl2s that belong to this ms2
        if mdl2.ms_2_name == ms2_basename:
            mdl2s.append((mdl2_name, mdl2))
    # sort them by model index
    mdl2s.sort(key=lambda tup: tup[1].index)
    # create sized str entries and model data fragments
    for mdl2_name, mdl2 in mdl2s:
        mdl2_file_entry = self.get_file_entry(mdl2_name)
        mdl2_entry = self.create_ss_entry(mdl2_file_entry)
        mdl2_entry.pointers[0].pool_index = -1
        mdl2_entry.pointers[0].data_offset = 0
        ms2_entry.children.append(mdl2_entry)
        # first, create all ModelData structs as fragments
        mdl2_entry.model_data_frags = [self.create_fragment() for _ in range(mdl2.model_info.num_models)]
    # create the 5 fixed frags per MDL2 and write their data
    for (mdl2_name, mdl2), mdl2_entry in zip(mdl2s, ms2_entry.children):
        mdl2_entry.fragments = [self.create_fragment() for _ in range(5)]
        materials, lods, objects, model_data_ptr, model_info = mdl2_entry.fragments
        materials_offset = pool.data.tell()
        materials.pointers[1].pool_index = pool_index
        materials.pointers[1].data_offset = materials_offset
        pool.data.write(as_bytes(mdl2.materials, version_info=versions))
        lods.pointers[1].pool_index = pool_index
        lods.pointers[1].data_offset = pool.data.tell()
        pool.data.write(as_bytes(mdl2.lods, version_info=versions))
        objects.pointers[1].pool_index = pool_index
        objects.pointers[1].data_offset = pool.data.tell()
        objects_bytes = as_bytes(mdl2.objects, version_info=versions)
        pool.data.write(objects_bytes + get_padding(len(objects_bytes), alignment=8))
        # modeldatas start here
        # NOTE(review): model_info ptr1 is pointed back at the materials start —
        # looks intentional for this legacy layout, but worth confirming
        model_info.pointers[1].pool_index = pool_index
        model_info.pointers[1].data_offset = materials_offset
    # write modeldata
    for (mdl2_name, mdl2), mdl2_entry in zip(mdl2s, ms2_entry.children):
        materials, lods, objects, model_data_ptr, model_info = mdl2_entry.fragments
        model_data_ptr.pointers[1].pool_index = pool_index
        model_data_ptr.pointers[1].data_offset = pool.data.tell()
        # write mdl2 modeldata frags
        for frag, modeldata in zip(mdl2_entry.model_data_frags, mdl2.models):
            frag.pointers[0].pool_index = pool_index
            frag.pointers[0].data_offset = pool.data.tell()
            pool.data.write(as_bytes(modeldata, version_info=versions))
    # create fragments for ms2
    ms2_entry.fragments = [self.create_fragment() for _ in range(3)]
    # write model info
    for mdl2_name, mdl2 in mdl2s:
        model_info_bytes = as_bytes(mdl2.model_info, version_info=versions)
        if mdl2.index == 0:
            f_0, f_1, f_2 = ms2_entry.fragments
            f_1.pointers[1].pool_index = pool_index
            f_1.pointers[1].data_offset = pool.data.tell()
            # only write core model info
            pool.data.write(model_info_bytes)
        else:
            # grab the preceding mdl2 entry since it points ahead
            prev_mdl2_entry = ms2_entry.children[mdl2.index - 1]
            # get its model info fragment
            materials, lods, objects, model_data_ptr, model_info = prev_mdl2_entry.fragments
            model_info.pointers[1].pool_index = pool_index
            model_info.pointers[1].data_offset = pool.data.tell()
            # we write this anyway
            # todo - get the actual data
            pool.data.write(b"\x00" * 40)
            # we should only pool.data.write(model_info_bytes)
        this_mdl2_entry = ms2_entry.children[mdl2.index]
        materials, lods, objects, model_data_ptr, model_info = this_mdl2_entry.fragments
        for frag in (materials, lods, objects, model_data_ptr):
            frag.pointers[0].pool_index = pool_index
            frag.pointers[0].data_offset = pool.data.tell()
            pool.data.write(b"\x00" * 8)
    # write last 40 bytes to model_info
    # (model_info here is the loop-leftover from the last mdl2 processed)
    if mdl2s:
        model_info.pointers[0].pool_index = pool_index
        model_info.pointers[0].data_offset = pool.data.tell()
        pool.data.write(b"\x00" * 40)
    # write the ms2 itself
    ms2_entry.pointers[0].pool_index = pool_index
    ms2_entry.pointers[0].data_offset = pool.data.tell()
    # load ms2 ss data
    ms2_ss_bytes = as_bytes(ms2_file.general_info, version_info=versions)
    # + ms2_entry.pointers[0].data[24:]
    pool.data.write(ms2_ss_bytes)
    # first, 3 * 8 bytes of 00
    for frag in ms2_entry.fragments:
        frag.pointers[0].pool_index = pool_index
        frag.pointers[1].pool_index = pool_index
        frag.pointers[0].data_offset = pool.data.tell()
        pool.data.write(b"\x00" * 8)
    # now the actual data
    buffer_info_frag, model_info_frag, end_frag = ms2_entry.fragments
    buffer_info_offset = pool.data.tell()
    # set ptr to buffer info for each ModelData frag
    for mdl2_entry in ms2_entry.children:
        for frag in mdl2_entry.model_data_frags:
            frag.pointers[1].pool_index = pool_index
            frag.pointers[1].data_offset = buffer_info_offset
    # todo - from the frag log, buffer_info_bytes should be 48 bytes but is 32
    buffer_info_frag.pointers[1].data_offset = buffer_info_offset
    buffer_info_bytes = as_bytes(ms2_file.buffer_info, version_info=versions)
    logging.debug(f"len(buffer_info_bytes) {len(buffer_info_bytes)}")
    pool.data.write(buffer_info_bytes)
    # the last ms2 fragment
    end_frag.pointers[1].data_offset = pool.data.tell()
    pool.data.write(struct.pack("<ii", -1, 0))
    # create ms2 data
    self.create_data_entry(file_entry, (ms2_file.buffer_0_bytes, ms2_file.buffer_1_bytes, ms2_file.buffer_2_bytes))
def load_ms2(ovl_data, ms2_file_path, ms2_entry):
    """Inject an external .ms2 file (legacy path) into an existing ms2 entry.

    Updates the sized-str data (keeping its trailing bytes past offset 24),
    the buffer info fragment, the data entry buffers, then each child mdl2's
    modeldata and list fragments, and finally the chained model info blocks.

    :raises AttributeError: if a mdl2's mesh count doesn't match its entry
    """
    logging.info(f"Injecting MS2")
    ms2_file = Ms2File()
    ms2_file.load(ms2_file_path, read_bytes=True)
    versions = get_versions(ovl_data)
    # load ms2 ss data
    ms2_ss_bytes = as_bytes(
        ms2_file.general_info, version_info=versions) + ms2_entry.pointers[0].data[24:]
    ms2_entry.pointers[0].update_data(ms2_ss_bytes, update_copies=True)
    # overwrite ms2 buffer info frag
    buffer_info_frag = ms2_entry.fragments[0]
    buffer_info_frag.pointers[1].update_data(as_bytes(ms2_file.buffer_info, version_info=versions), update_copies=True)
    # update ms2 data
    ms2_entry.data_entry.update_data([
        ms2_file.buffer_0_bytes, ms2_file.buffer_1_bytes, ms2_file.buffer_2_bytes])
    logging.info(f"Injecting MDL2s")
    ms2_dir = os.path.dirname(ms2_file_path)
    mdl2s = []
    for mdl2_entry in ms2_entry.children:
        mdl2_path = os.path.join(ms2_dir, mdl2_entry.name)
        mdl2 = Mdl2File()
        mdl2.load(mdl2_path)
        mdl2s.append(mdl2)
        if len(mdl2_entry.model_data_frags) != len(mdl2.models):
            raise AttributeError(
                f"{mdl2_entry.name} doesn't have the right amount of meshes!")
        # overwrite mdl2 modeldata frags
        for frag, modeldata in zip(mdl2_entry.model_data_frags, mdl2.models):
            frag_data = as_bytes(modeldata, version_info=versions)
            frag.pointers[0].update_data(frag_data, update_copies=True)
        materials, lods, objects, model_data_ptr, model_info = mdl2_entry.fragments
        for frag, mdl2_list in ((
                materials, mdl2.materials,
                ), (lods, mdl2.lods), (objects, mdl2.objects)):
            if len(mdl2_list) > 0:
                data = as_bytes(mdl2_list, version_info=versions)
                frag.pointers[1].update_data(data, update_copies=True, pad_to=8)
    # model info blocks are chained: entry 0 lives on the ms2's own fragment,
    # every later one lives on the PRECEDING mdl2's model_info fragment
    for mdl2 in mdl2s:
        data = as_bytes(mdl2.model_info, version_info=versions)
        if mdl2.index == 0:
            f_0, f_1, f_2 = ms2_entry.fragments
            f_1.pointers[1].update_data(data, update_copies=True)
        else:
            # grab the preceding mdl2 entry since it points ahead
            mdl2_entry = ms2_entry.children[mdl2.index - 1]
            # get its model info fragment
            materials, lods, objects, model_data_ptr, model_info = mdl2_entry.fragments
            # keep the 40-byte prefix when the stock data includes it
            # (sizes 144/160 — presumably per-game layout; TODO confirm)
            if (is_jwe(ovl_data) and model_info.pointers[0].data_size == 144) \
                    or (is_pz(ovl_data) and model_info.pointers[0].data_size == 160):
                data = model_info.pointers[0].data[:40] + data
            model_info.pointers[0].update_data(data, update_copies=True)
def create(self):
    """Build an ms2 sized-str entry and its child mdl2 entries from a .ms2
    file plus the sibling .mdl2 files named in it.

    The per-mdl2 fragment offsets into the model info block depend on the
    game's modelinfo struct size, so the pool-write order below is the
    on-disk layout — do not reorder.
    """
    ms2_file = Ms2File()
    ms2_file.load(self.file_entry.path, read_bytes=True)
    ms2_dir = os.path.dirname(self.file_entry.path)
    ms2_entry = self.create_ss_entry(self.file_entry)
    ms2_entry.children = []
    versions = get_versions(self.ovl)
    # 1 for the ms2, 2 for each mdl2
    # pool.num_files += 1
    # create sized str entries and mesh data fragments
    for model_info, mdl2_name in zip(ms2_file.model_infos, ms2_file.mdl_2_names):
        # pool.num_files += 2
        mdl2_path = os.path.join(ms2_dir, mdl2_name+".mdl2")
        mdl2_file_entry = self.get_file_entry(mdl2_path)
        mdl2_entry = self.create_ss_entry(mdl2_file_entry)
        mdl2_entry.pointers[0].pool_index = -1
        ms2_entry.children.append(mdl2_entry)
        # first, create all MeshData structs as fragments
        mdl2_entry.model_data_frags = [self.create_fragment() for _ in range(model_info.num_meshes)]
    first_materials_ptr = None
    # create the 5 fixed frags per MDL2 and write their data
    for model_info, mdl2_entry in zip(ms2_file.model_infos, ms2_entry.children):
        mdl2_entry.fragments = [self.create_fragment() for _ in range(5)]
        materials, lods, objects, meshes, model_info_ptr = mdl2_entry.fragments
        # remember the very first materials ptr — every model_info_ptr targets it
        if first_materials_ptr is None:
            first_materials_ptr = materials.pointers[1]
        self.write_to_pool(materials.pointers[1], 2, as_bytes(model_info.model.materials, version_info=versions))
        self.write_to_pool(lods.pointers[1], 2, as_bytes(model_info.model.lods, version_info=versions))
        objects_bytes = as_bytes(model_info.model.objects, version_info=versions)
        # todo - padding like this is likely wrong, probably relative to start of materials
        self.write_to_pool(objects.pointers[1], 2, objects_bytes + get_padding(len(objects_bytes), alignment=8))
        self.write_to_pool(meshes.pointers[1], 2, as_bytes(model_info.model.meshes, version_info=versions))
        self.ptr_relative(model_info_ptr.pointers[1], first_materials_ptr)
        # point to start of each modeldata; each MeshData struct is 64 bytes here
        offset = 0
        for frag in mdl2_entry.model_data_frags:
            self.ptr_relative(frag.pointers[0], meshes.pointers[1], rel_offset=offset)
            offset += 64
    # create fragments for ms2
    buffer_info_frag, model_info_frag, end_frag = self.create_fragments(ms2_entry, 3)
    # write mesh info
    self.write_to_pool(model_info_frag.pointers[1], 2, as_bytes(ms2_file.model_infos, version_info=versions))
    offset = 0
    for mdl2_entry in ms2_entry.children:
        # byte size of modelinfo varies - JWE1 (176 bytes total)
        if ovl_versions.is_jwe(self.ovl):
            offset += 104
        # 16 additional bytes for PZ/PZ16/JWE2 (192 bytes total)
        else:
            offset += 120
        # the 5 fragment ptr0 slots follow consecutively (5*8=40), then 32 trailing bytes
        for frag in mdl2_entry.fragments:
            self.ptr_relative(frag.pointers[0], model_info_frag.pointers[1], rel_offset=offset)
            offset += 8
        offset += 32
    # buffer info data
    buffer_info_bytes = as_bytes(ms2_file.buffer_info, version_info=versions)
    self.write_to_pool(buffer_info_frag.pointers[1], 2, buffer_info_bytes)
    # set ptr to buffer info for each MeshData frag
    for mdl2_entry in ms2_entry.children:
        for frag in mdl2_entry.model_data_frags:
            self.ptr_relative(frag.pointers[1], buffer_info_frag.pointers[1])
    # ms2 ss data
    ms2_ss_bytes = as_bytes(ms2_file.info, version_info=versions)
    self.write_to_pool(ms2_entry.pointers[0], 2, ms2_ss_bytes)
    # set frag ptr 0: the three ms2 fragments anchor at fixed ss offsets
    for frag, offset in zip(ms2_entry.fragments, (24, 32, 40)):
        self.ptr_relative(frag.pointers[0], ms2_entry.pointers[0], rel_offset=offset)
    # the last ms2 fragment
    self.write_to_pool(end_frag.pointers[1], 2, struct.pack("<ii", -1, 0))
    # create ms2 data
    self.create_data_entry(ms2_entry, ms2_file.buffers)
def _get_bani_data(self, file_path):
    """Parse the bani file at *file_path* and return its data serialized to bytes."""
    loaded = BaniFile()
    loaded.load(file_path)
    return as_bytes(loaded.data)
def get_shader(self, xml):
    """Return the shader identifier string ("hash::packed_shader_name") as bytes."""
    packed_shader = _pack_name(xml.attrib["shader"])
    shader_hash = xml.attrib['hash']
    return as_bytes(f"{shader_hash}::{packed_shader}")