def update_names(self):
    logging.info("Updating MS2 name buffer")
    self.buffer_0.names.clear()
    for model_info in self.model_infos:
        for material in model_info.model.materials:
            material.name_index = self.get_name_index(material.name)
        if model_info.bone_info:
            for bone_index, bone in enumerate(model_info.bone_info.bones):
                model_info.bone_info.name_indices[bone_index] = self.get_name_index(bone.name)
            self.update_joints(model_info.bone_info)
    # print(self.buffer_0.names)
    logging.info("Updating MS2 name hashes")
    # update hashes from new names
    self.info.name_count = len(self.buffer_0.names)
    self.buffer_0.name_hashes.resize(len(self.buffer_0.names))
    for name_i, name in enumerate(self.buffer_0.names):
        self.buffer_0.name_hashes[name_i] = djb(name.lower())
def load_fgm(ovl, fgm_file_path, fgm_sized_str_entry):
    versions = get_versions(ovl)
    fgm_data = FgmFile()
    fgm_data.load(fgm_file_path)
    sizedstr_bytes = as_bytes(fgm_data.fgm_info, version_info=versions) + as_bytes(fgm_data.two_frags_pad, version_info=versions)
    # todo - move texpad into fragment padding?
    textures_bytes = as_bytes(fgm_data.textures, version_info=versions) + as_bytes(fgm_data.texpad, version_info=versions)
    attributes_bytes = as_bytes(fgm_data.attributes, version_info=versions)
    # the actual injection
    fgm_sized_str_entry.data_entry.update_data((fgm_data.buffer_bytes,))
    fgm_sized_str_entry.pointers[0].update_data(sizedstr_bytes, update_copies=True)
    if len(fgm_sized_str_entry.fragments) == 4:
        datas = (textures_bytes, attributes_bytes, fgm_data.zeros_bytes, fgm_data.data_bytes)
    # fgms without zeros
    elif len(fgm_sized_str_entry.fragments) == 3:
        datas = (textures_bytes, attributes_bytes, fgm_data.data_bytes)
    # fgms for variants
    elif len(fgm_sized_str_entry.fragments) == 2:
        datas = (attributes_bytes, fgm_data.data_bytes)
    else:
        raise AttributeError("Unexpected fgm frag count")
    # inject fragment datas
    for frag, data in zip(fgm_sized_str_entry.fragments, datas):
        frag.pointers[1].update_data(data, update_copies=True)
    # update dependencies on ovl
    fgm_file_entry = get_file_entry(ovl, fgm_sized_str_entry)
    for dep_entry, tex_name in zip(fgm_file_entry.dependencies, fgm_data.texture_names):
        dep_entry.basename = tex_name
        dep_entry.name = dep_entry.basename + dep_entry.ext.replace(":", ".")
        dep_entry.file_hash = djb(tex_name.lower())
def update_names(self):
    logging.info("Updating MS2 name buffer")
    self.buffer_0.names.clear()
    for mdl2 in self.mdl2s.values():
        for material in mdl2.materials:
            if material.name not in self.buffer_0.names:
                self.buffer_0.names.append(material.name)
            material.name_index = self.buffer_0.names.index(material.name)
    for bone_info in self.bone_infos:
        for bone_index, bone in enumerate(bone_info.bones):
            if bone.name not in self.buffer_0.names:
                self.buffer_0.names.append(bone.name)
            bone_info.name_indices[bone_index] = self.buffer_0.names.index(bone.name)
    for bone_info in self.bone_infos:
        self.update_joints(bone_info)
    # print(self.buffer_0.names)
    logging.info("Updating MS2 name hashes")
    # update hashes from new names
    self.general_info.name_count = len(self.buffer_0.names)
    self.buffer_0.name_hashes.resize(len(self.buffer_0.names))
    for name_i, name in enumerate(self.buffer_0.names):
        self.buffer_0.name_hashes[name_i] = djb(name.lower())
def djb_bytes(string):
    return struct.pack("<I", djb(string.lower()))
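# `djb` itself is defined elsewhere in the codebase; a minimal sketch, assuming it is the
# classic djb2 string hash reduced to an unsigned 32-bit value (which is what the "<I"
# struct format above expects), might look like the hypothetical helper below. Note that
# callers in this file lowercase the name before hashing.
def djb_sketch(string):
    # djb2: start at 5381, then hash = hash * 33 + character code, kept within 32 bits
    hash_value = 5381
    for char in string:
        hash_value = (hash_value * 33 + ord(char)) & 0xFFFFFFFF
    return hash_value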
def set_dependency_identity(self, dependency, file_name):
    """Use a standard file name with extension"""
    dependency.name = file_name
    dependency.basename, dependency.ext = os.path.splitext(file_name)
    dependency.ext = dependency.ext.replace(".", ":")
    dependency.file_hash = djb(dependency.basename.lower())
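# Usage sketch (the file name below is illustrative only; the dependency objects are the
# real OVL format entries used elsewhere in this file). Given "marsh_lantern.tex", the
# method stores:
#     dependency.name      = "marsh_lantern.tex"
#     dependency.basename  = "marsh_lantern"
#     dependency.ext       = ":tex"   (dependency extensions swap "." for ":")
#     dependency.file_hash = djb("marsh_lantern")
# load_fgm above undoes the extension swap with ext.replace(":", ".") when it rebuilds
# dependency names for display.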
def dat_hasher(ovl, name_tups, species_mode=False):
    print(f"Hashing and Renaming for {name_tups}")
    ovl_lists = [ovl.files, ovl.dependencies, ovl.dirs]
    ovs_lists = []
    for archive_entry in ovl.archives:
        content = archive_entry.content
        ovs_lists.extend((
            content.data_entries,
            content.set_header.sets,
            content.set_header.assets,
            content.pools,
            content.sized_str_entries
        ))
    old_hash_to_new = {}
    old_hash_to_new_pz = {}
    # first go over the ovl lists to generate new hashes
    for i, entry_list in enumerate(ovl_lists):
        for entry_index, entry in enumerate(entry_list):
            try:
                if "bad hash" in entry.name:
                    print("Skipping", entry.name, entry.file_hash)
                    continue
                if species_mode:
                    if entry.ext not in SPECIES_ONLY_FMTS:
                        print("Skipping", entry.name, entry.file_hash)
                        continue
                new_name = entry.basename
                for old, new in name_tups:
                    new_name = new_name.replace(old, new)
                if hasattr(entry, "file_hash"):
                    new_hash = djb(new_name)
                    if i == 0:
                        # only want a list of file names, don't want dirs and dependencies overriding this next loop
                        old_hash_to_new[entry.file_hash] = (new_name, new_hash)
                        old_hash_to_new_pz[entry_index] = (new_name, new_hash)
                    print(f"List{i} {entry.basename} -> {new_name}, {entry.file_hash} -> {new_hash}")
                    entry.file_hash = new_hash
                else:
                    print(f"List{i} {entry.basename} -> {new_name}, [NOT HASHED]")
                entry.basename = new_name
                entry.name = entry.basename + entry.ext
            except Exception as err:
                print(err)
    # we do this in a second step to resolve the links
    for i, entry_list in enumerate(ovs_lists):
        for entry in entry_list:
            if species_mode:
                if entry.ext not in SPECIES_ONLY_FMTS:
                    print("Skipping", entry.name, entry.file_hash)
                    continue
            if ovl.user_version.is_jwe:
                new_name, new_hash = old_hash_to_new[entry.file_hash]
                entry.file_hash = new_hash
            else:
                new_name, new_hash = old_hash_to_new_pz[entry.file_hash]
            entry.basename = new_name
            entry.name = f"{new_name}{entry.ext}"
    ovl.update_names()
    # resort the file entries
    for i, file in enumerate(ovl.files):
        file.old_index = i
    # sort the different lists according to the criteria specified
    ovl.files.sort(key=lambda x: (x.ext, x.file_hash))
    ovl.dependencies.sort(key=lambda x: x.file_hash)
    # create a lookup table to map the old indices to the new ones
    lut = {}
    for i, file in enumerate(ovl.files):
        lut[file.old_index] = i
    # update the file indices
    for dependency in ovl.dependencies:
        dependency.file_index = lut[dependency.file_index]
    for aux in ovl.aux_entries:
        aux.file_index = lut[aux.file_index]
    if ovl.user_version.is_jwe:
        print("JWE")
    else:
        for i, entry_list in enumerate(ovs_lists):
            for entry in entry_list:
                entry.file_hash = lut[entry.file_hash]
    ovl.update_ss_dict()
    print("Done!")
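# Call sketch (names are illustrative only): name_tups is a sequence of (old, new)
# substring pairs that get substituted into every entry's basename before rehashing, e.g.
#     dat_hasher(ovl, [("rhino", "hippo")])
# With species_mode=True, only entries whose extension is listed in SPECIES_ONLY_FMTS are
# renamed and rehashed; everything else is skipped.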