def run(self, file_id: Union[str, int], forge_file_name: str, datafile_id: int, options: Union[List[dict], None] = None):
    """Export every model referenced by a data block as one Wavefront .obj file.

    Walks each file listed in the data block; for entities/entity groups it
    resolves the mesh instance data (directly, or through a LOD selector using
    the "LOD" option) and exports each referenced model with its transform(s).

    :param file_id: id of the data block file to export
    :param forge_file_name: name of the forge file containing it
    :param datafile_id: id of the datafile containing it
    :param options: plugin option dicts; index 0 supplies "Export Method"
        and "LOD" (stored on self._options when given)
    """
    if options is not None:
        self._options = options  # should do some validation here
    # TODO add select directory option
    save_folder = pyUbiForge.CONFIG.get('dumpFolder', 'output')
    data = pyUbiForge.temp_files(file_id, forge_file_name, datafile_id)
    if data is None:
        logging.warning(f"Failed to find file {file_id:016X}")
        return
    data_block_name = data.file_name
    data_block: DataBlock = pyUbiForge.read_file(data.file)
    if self._options[0]["Export Method"] == 'Wavefront (.obj)':
        obj_handler = mesh.ObjMtl(data_block_name, save_folder)
        for data_block_entry_id in data_block.files:
            # NOTE(review): `data` is reused here, clobbering the data block's own entry
            data = pyUbiForge.temp_files(data_block_entry_id)
            if data is None:
                logging.warning(f"Failed to find file {data_block_entry_id:016X}")
                continue
            if data.file_type in ('0984415E', '3F742D26'):  # entity and entity group
                entity: Entity = pyUbiForge.read_file(data.file)
                if entity is None:
                    logging.warning(f"Failed reading file {data.file_name} {data.file_id:016X}")
                    continue
                for nested_file in entity.nested_files:
                    if nested_file.file_type == 'EC658D29':  # visual
                        nested_file: Visual
                        if '01437462' in nested_file.nested_files.keys():  # LOD selector
                            lod_selector: LODSelector = nested_file.nested_files['01437462']
                            # pick the level of detail requested in the plugin options
                            mesh_instance_data: MeshInstanceData = lod_selector.lod[self._options[0]['LOD']]
                        elif '536E963B' in nested_file.nested_files.keys():  # Mesh instance
                            mesh_instance_data: MeshInstanceData = nested_file.nested_files['536E963B']
                        else:
                            logging.warning(f"Could not find mesh instance data for {data.file_name} {data.file_id:016X}")
                            continue
                        if mesh_instance_data is None:
                            logging.warning(f"Failed to find file {data.file_name}")
                            continue
                        model_data = pyUbiForge.temp_files(mesh_instance_data.mesh_id)
                        if model_data is None:
                            logging.warning(f"Failed to find file {mesh_instance_data.mesh_id:016X}")
                            continue
                        model: mesh.BaseModel = pyUbiForge.read_file(model_data.file)
                        if model is None or model.vertices is None:
                            logging.warning(f"Failed reading model file {model_data.file_name} {model_data.file_id:016X}")
                            continue
                        transform = entity.transformation_matrix
                        if len(mesh_instance_data.transformation_matrix) == 0:
                            # no per-instance matrices: export once with just the entity transform
                            obj_handler.export(model, model_data.file_name, transform)
                        else:
                            # one export per instance matrix, composed with the entity transform
                            for trm in mesh_instance_data.transformation_matrix:
                                obj_handler.export(model, model_data.file_name, numpy.matmul(transform, trm))
                        logging.info(f'Exported {model_data.file_name}')
            else:
                logging.info(f'File type "{data.file_type}" is not currently supported. It has been skipped')
        obj_handler.save_and_close()
        logging.info(f'Finished exporting {data_block_name}.obj')
def run(self, forge_file_name, *_):
    """Decompress every datafile in the given forge file into the temp file cache.

    Progress is logged roughly every 100 datafiles; datafiles that fail to
    decompress are skipped (best-effort).

    :param forge_file_name: name of the forge file whose datafiles to decompress
    """
    datafile_count = len(pyUbiForge.forge_files[forge_file_name].datafiles)
    count = 0
    for file_id in pyUbiForge.forge_files[forge_file_name].datafiles:
        try:
            pyUbiForge.temp_files(file_id, forge_file_name, file_id)
        except Exception:
            # fix: was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt;
            # keep the best-effort skip but only for real errors
            continue
        count += 1
        if count % 100 == 99:
            logging.info(
                f"Decompressed {round(100*count/datafile_count, 2)}% of {datafile_count} datafiles"
            )
    logging.info("Decompressed all files")
def run(self, file_id: Union[str, int], forge_file_name: str, datafile_id: int, options: Union[List[dict], None] = None):
    """Run the formatter over a random sample of files of the requested types.

    Visits the forge file's datafiles in random order and formats every
    contained file whose type is listed in the "File Types" option
    (semicolon separated), stopping after "Format Count" files.

    :param file_id: unused here; present for the common plugin signature
    :param forge_file_name: name of the forge file to sample from
    :param datafile_id: overwritten by the loop below
    :param options: plugin option dicts; index 0 supplies "File Types" and "Format Count"
    """
    if options is not None:
        self._options = options  # should do some validation here
    file_types = [
        file_type.upper()
        for file_type in self._options[0].get("File Types", "").split(';')
    ]
    max_count = self._options[0].get("Format Count", 1000)
    files_done = 0
    datafiles_done = 0
    datafile_count = len(pyUbiForge.forge_files[forge_file_name].datafiles)
    for datafile_id, datafile in random.sample(
            list(pyUbiForge.forge_files[forge_file_name].datafiles.items()),
            datafile_count):
        try:
            pyUbiForge.temp_files(datafile_id, forge_file_name, datafile_id)
        except Exception:
            # fix: was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt
            continue
        for file_id in datafile.files.keys():
            data = pyUbiForge.temp_files(file_id, forge_file_name, datafile_id)
            if data is None:
                logging.warning(f"Failed to find file {file_id:016X}")
                # fix: was `return`, which aborted the whole batch on one missing file;
                # every sibling loop in this file skips with `continue`
                continue
            if data.file_type not in file_types:
                continue
            logging.info(data.file_name)
            read_file(data, file_id)
            files_done += 1
            if files_done >= max_count:
                break
        if files_done >= max_count:
            break
        if datafiles_done % 100 == 99:
            logging.info(
                f"Processed {round(100*datafiles_done/datafile_count, 2)}% of {datafile_count} datafiles"
            )
        datafiles_done += 1
def run(self, file_id: Union[str, int], forge_file_name: str, datafile_id: int, options: Union[List[dict], None] = None):
    """Format every file of the requested types inside one datafile.

    Each matching file is formatted into
    ``<dumpFolder>/<game>_<name>_<id>.format``.

    :param file_id: overwritten by the loop below; present for the common plugin signature
    :param forge_file_name: name of the forge file containing the datafile
    :param datafile_id: id of the datafile whose files are formatted
    :param options: plugin option dicts; index 0 supplies "File Types" (semicolon separated)
    """
    if options is not None:
        self._options = options  # should do some validation here
    file_types = [
        file_type.upper()
        for file_type in self._options[0].get("File Types", "").split(';')
    ]
    for file_id in pyUbiForge.forge_files[forge_file_name].datafiles[
            datafile_id].files.keys():
        data = pyUbiForge.temp_files(file_id, forge_file_name, datafile_id)
        if data is None:
            logging.warning(f"Failed to find file {file_id:016X}")
            # fix: was `return`, which silently stopped processing the remaining files
            continue
        if data.file_type not in file_types:
            continue
        output = pyUbiForge.read_file(data.file)
        if output is None:
            logging.warning(data.file_name)
        out_path = os.path.join(
            pyUbiForge.CONFIG.get('dumpFolder', 'output'),
            f'{pyUbiForge.game_identifier()}_{data.file_name}_{file_id:016X}.format'
        )
        # fix: the output file was opened and never closed (resource leak)
        with open(out_path, 'w') as out_file:
            pyUbiForge.read_file(data.file, out_file)
def get_material_ids(file_id: int) -> Material:
    """Resolve a material file into a named Material via its material set.

    Returns a placeholder ``Material(..., missing_no=True)`` whenever a lookup
    or parse step fails, so callers always get a usable Material.

    :param file_id: id of the material file
    :return: the parsed Material (renamed to the material file's name), or a missing-no placeholder
    """
    data = pyUbiForge.temp_files(file_id)
    if data is None:
        logging.warning(f"Failed to find file {file_id:016X}")
        return Material(f'{file_id:016X}', missing_no=True)
    name = data.file_name
    material_file = pyUbiForge.read_file(data.file)
    # fix: read_file can return None (see the other plugins); previously this
    # crashed with AttributeError on `.material_set`
    if material_file is None:
        logging.warning(f"Failed reading file {name} {file_id:016X}")
        return Material(name, missing_no=True)
    material_set_id = material_file.material_set
    data = pyUbiForge.temp_files(material_set_id)
    if data is None:
        logging.warning(f"Failed to find file {material_set_id:016X}")
        return Material(name, missing_no=True)
    material = pyUbiForge.read_file(data.file)
    # fix: guard the material set parse as well before assigning `.name`
    if material is None:
        logging.warning(f"Failed reading file {data.file_name} {material_set_id:016X}")
        return Material(name, missing_no=True)
    material.name = name
    return material
def run(self, file_id: Union[str, int], forge_file_name: str, datafile_id: int, options: Union[List[dict], None] = None):
    """Export a single model file using the chosen "Export Method" option.

    Supported methods: Wavefront (.obj), Collada (.dae), and an experimental
    mode that sends mesh/bone data to a listener on localhost:6163 and saves
    per-bone weight images.

    :param file_id: id of the model file to export
    :param forge_file_name: name of the forge file containing it
    :param datafile_id: id of the datafile containing it
    :param options: plugin option dicts; index 0 supplies "Export Method"
    """
    if options is not None:
        self._options = options  # should do some validation here
    # TODO add select directory option
    save_folder = pyUbiForge.CONFIG.get('dumpFolder', 'output')
    data = pyUbiForge.temp_files(file_id, forge_file_name, datafile_id)
    if data is None:
        logging.warning(f"Failed to find file {file_id:016X}")
        return
    model_name = data.file_name
    export_method = self._options[0]["Export Method"]
    if export_method == 'Wavefront (.obj)':
        model: mesh.BaseModel = pyUbiForge.read_file(data.file)
        if model is not None:
            obj_handler = mesh.ObjMtl(model_name, save_folder)
            obj_handler.export(model, model_name)
            obj_handler.save_and_close()
            logging.info(f'Exported {file_id:016X}')
        else:
            logging.warning(f'Failed to export {file_id:016X}')
    elif export_method == 'Collada (.dae)':
        obj_handler = mesh.Collada(model_name, save_folder)
        obj_handler.export(file_id, forge_file_name, datafile_id)
        obj_handler.save_and_close()
        logging.info(f'Exported {file_id:016X}')
    elif export_method == 'Send to Blender (experimental)':
        model: mesh.BaseModel = pyUbiForge.read_file(data.file)
        if model is not None:
            c = Client(('localhost', 6163))
            cols = [Image.new('RGB', (1024, 1024), (128, 0, 0)) for _ in range(len(model.bones))]
            # hoisted: one Draw per image instead of constructing one per vertex weight
            draws = [ImageDraw.Draw(im) for im in cols]
            r = 5
            for mesh_index, m in enumerate(model.meshes):
                c.send({
                    'type': 'MESH',
                    'verts': tuple(tuple(vert) for vert in model.vertices),
                    'faces': tuple(tuple(face) for face in model.faces[mesh_index][:m['face_count']])
                })
            for vtx in model.vert_table:
                # fix: numpy.float was deprecated in NumPy 1.20 and removed in 1.24;
                # the documented replacement is the builtin float
                x, y = vtx['vt'].astype(float) / 2
                for index, bone_index in enumerate(vtx['bn']):
                    if vtx['bw'][index] > 0:
                        draws[bone_index].ellipse(
                            (x - r, y - r, x + r, y + r),
                            fill=(vtx['bw'][index], vtx['bw'][index], vtx['bw'][index]))
            for index, im in enumerate(cols):
                im.save(f'{save_folder}/{model_name}_{index}.png')
                print(f'saved {save_folder}/{model_name}_{index}.png')
            c.send({
                'type': 'BONES',
                'bone_id': [bone.bone_id for bone in model.bones],
                'mat': [bone.transformation_matrix for bone in model.bones],
            })
def run(self, file_id: Union[str, int], forge_file_name: str, datafile_id: int, options: Union[List[dict], None] = None):
    """Stitch the minimap's tile textures into one large image and save it as a PNG.

    The first tile read fixes the tile size (from its DDS header) and the
    full canvas size; tiles that cannot be found are skipped.
    """
    # TODO add select directory option
    save_folder = pyUbiForge.CONFIG.get('dumpFolder', 'output')
    data = pyUbiForge.temp_files(file_id, forge_file_name, datafile_id)
    if data is None:
        logging.warning(f"Failed to find file {file_id:016X}")
        return
    file_name = data.file_name
    minimap_textures: MMClass = pyUbiForge.read_file(data.file)
    rows = minimap_textures.width
    cols = minimap_textures.height
    output_image = None
    tile_width = tile_height = 128
    for row in range(rows):
        for col in range(cols):
            texture_file_id = minimap_textures.image_ids[row * cols + col]
            texture_data = pyUbiForge.temp_files(texture_file_id)
            if texture_data is None:
                logging.warning(f"Failed to find file {texture_file_id:016X}")
                continue
            texture: TextureClass = pyUbiForge.read_file(texture_data.file)
            if output_image is None:
                # the first readable tile determines tile and canvas dimensions
                tile_width = struct.unpack('<I', texture.dwWidth)[0]
                tile_height = struct.unpack('<I', texture.dwHeight)[0]
                output_image = Image.new(
                    'RGBA', (tile_width * rows, tile_height * cols))
            tile_image = Image.open(BytesIO(texture.dds_string))
            # rows are pasted bottom-up
            output_image.paste(
                tile_image,
                (tile_width * col, tile_height * cols - tile_height * (row + 1)))
        logging.info(f'Written {row+1} row of {rows}')
    if output_image is None:
        logging.info('No Minimap to export')
    else:
        output_image.save(f'{save_folder}/{file_name}.png')
def read_id(self) -> int:
    """Read a file id from the stream and, when a format file is open,
    write the id's name and type (or a placeholder) into it.

    :return: the file id that was read
    """
    file_id = self._read_struct(pyUbiForge.game_functions.file_id_datatype, False, False)
    out = self._out_file
    if out is None:
        return file_id
    entry = pyUbiForge.temp_files(file_id)
    if entry is None:
        out.write('\t\tUnknown File ID\n')
    else:
        out.write(f'\t\t{entry.file_name}\t{entry.file_type}\n')
    return file_id
def run(self, file_id: Union[str, int], forge_file_name: str, datafile_id: int, options: Union[List[dict], None] = None):
    """Dump a file's raw (decompressed) bytes to ``<dumpFolder>/<game>_<name>_<id>.bin``.

    :param file_id: id of the file to dump
    :param forge_file_name: name of the forge file containing it
    :param datafile_id: id of the datafile containing it
    :param options: unused; present for the common plugin signature
    """
    data = pyUbiForge.temp_files(file_id, forge_file_name, datafile_id)
    if data is None:
        logging.warning(f"Failed to find file {file_id:016X}")
        return
    dump_folder = pyUbiForge.CONFIG.get('dumpFolder', 'output')
    # fix: create the dump folder like the formatter plugin does (consistency)
    if not os.path.isdir(dump_folder):
        os.makedirs(dump_folder)
    # fix: the output file was opened and never closed (resource leak)
    with open(
            os.path.join(
                dump_folder,
                f'{pyUbiForge.game_identifier()}_{data.file_name}_{file_id:016X}.bin'
            ), 'wb') as out_file:
        out_file.write(data.file.read_rest())
def run(self, file_id: Union[str, int], forge_file_name: str, datafile_id: int, options: Union[List[dict], None] = None):
    """Run the formatter on one file, writing ``<dumpFolder>/<game>_<name>_<id>.format``.

    :param file_id: id of the file to format
    :param forge_file_name: name of the forge file containing it
    :param datafile_id: id of the datafile containing it
    :param options: unused; present for the common plugin signature
    """
    data = pyUbiForge.temp_files(file_id, forge_file_name, datafile_id)
    if data is None:
        logging.warning(f"Failed to find file {file_id:016X}")
        return
    dump_folder = pyUbiForge.CONFIG.get('dumpFolder', 'output')
    if not os.path.isdir(dump_folder):
        os.makedirs(dump_folder)
    # fix: the output file was opened and never closed (resource leak)
    with open(
            os.path.join(
                dump_folder,
                f'{pyUbiForge.game_identifier()}_{data.file_name}_{file_id:016X}.format'
            ), 'w') as out_file:
        pyUbiForge.read_file(data.file, out_file)
    logging.info("Finished Formatting")
def export_dds(file_id: int, save_folder: str, forge_file_name: Union[None, str] = None, datafile_id: Union[None, int] = None):
    """Export a texture file as ``<save_folder>/<name>.dds``.

    Skips the write if the file already exists on disk.

    :param file_id: id of the texture file
    :param save_folder: directory to write the dds into
    :param forge_file_name: optional forge file hint for the lookup
    :param datafile_id: optional datafile hint for the lookup
    :return: the saved path, or None when the file cannot be found or read
    """
    data = pyUbiForge.temp_files(file_id, forge_file_name, datafile_id)
    if data is None:
        logging.warning(f"Failed to find file {file_id:016X}")
        return
    save_path = os.path.join(save_folder, f'{data.file_name}.dds')
    if os.path.isfile(save_path):
        logging.info(f'Texture "{data.file_name}" already exported')
        return save_path
    tex = pyUbiForge.read_file(data.file)
    # fix: read_file can return None (see the other plugins); previously this
    # crashed with AttributeError on `.export_dds`
    if tex is None:
        logging.warning(f'Failed reading texture "{data.file_name}" {file_id:016X}')
        return
    tex.export_dds(save_path)
    logging.info(f'Texture "{data.file_name}" exported')
    return save_path
def query(
        self,
        plugin_level: int,
        file_id: Union[str, int],
        forge_file_name: Union[None, str] = None,
        datafile_id: Union[None, int] = None) -> Tuple[List[str], Union[str, int]]:
    """
    Look up which plugins are relevant to the given entry and return their names in a list.
    Each plugin has a unique name which is used as the unique identifier.
    Give this name to the get function with the file information to run the plugin
    :param plugin_level: See plugin_level at the top
    :param file_id: The furthest down id in the list. (eg game identifier, forge file name, datafile id or actual file id)
    :param forge_file_name: The name of the forge file
    :param datafile_id: The integer value of the datafile
    :return:
    """
    self._load_plugins()
    # levels 1 and 2 are not keyed on file type
    if plugin_level in (1, 2):
        return list(self._plugins[plugin_level].keys()), file_id
    if plugin_level not in (3, 4):
        return None
    file_id = int(file_id)
    file_type = pyUbiForge.temp_files(file_id, forge_file_name, datafile_id).file_type
    # type-specific level-4 plugins plus the wildcard entries apply in both cases
    plugin_names = (
        list(self._plugins[4].get(file_type, {}).keys())
        + list(self._plugins[4]['*'].keys()))
    if plugin_level == 3:
        plugin_names = list(self._plugins[3].keys()) + plugin_names
    return list(set(plugin_names)), file_id
def export(
        self,
        model_file_id: int,
        forge_file_name: str = None,
        datafile_id: int = None,
        transformation_matrix: numpy.ndarray = None) -> None:
    """
    when called will load and export the mesh if it hasn't been

    Writes the model's geometry into the collada document the first time a
    given model_file_id is seen, then (on every call) appends a scene node
    instancing that geometry with the given transformation matrix.

    :param model_file_id: id of the model file to export
    :param forge_file_name: optional forge file hint for the lookup
    :param datafile_id: optional datafile hint for the lookup
    :param transformation_matrix: 4x4 world transform for this instance; identity when None
    :return: None
    """
    if not self.is_exported(model_file_id):
        data = pyUbiForge.temp_files(model_file_id, forge_file_name, datafile_id)
        if data is None:
            logging.warning(f"Failed to find file {model_file_id:016X}")
            return
        model = pyUbiForge.read_file(data.file)
        if model is None:  # sometimes reading the model fails
            return
        self._models_exported[model_file_id] = []
        # write models
        # NOTE(review): the loop variable `mesh` shadows the `mesh` module used elsewhere in this file
        for mesh_index, mesh in enumerate(model.meshes):
            # only the faces belonging to this sub-mesh, flattened to a vertex index list
            faces = model.faces[mesh_index][:mesh['face_count']].ravel()
            # re-index so the sub-mesh only carries the vertices it actually uses
            new_value_slice, faces = numpy.unique(faces, return_inverse=True)
            vertices = model.vertices[new_value_slice]
            texture_vertices = model.texture_vertices[new_value_slice]
            geometry_id = f'{model_file_id}-mesh-{mesh_index}'
            model_name = f'{data.file_name}-{mesh_index}'
            material_name = f'{self._mtl_handler.get(model.materials[mesh_index]).name}-material'
            self._models_exported[model_file_id].append(
                [geometry_id, model_name, material_name])
            self._dae.write(
                f'''
        <geometry id="{model_file_id}-mesh-{mesh_index}" name="{model_name}">
            <mesh>
                <source id="{model_file_id}-mesh-positions-{mesh_index}">
                    <float_array id="{model_file_id}-mesh-positions-array-{mesh_index}" count="{vertices.size}">{plaintext_array(vertices)}</float_array>
                    <technique_common>
                        <accessor source="#{model_file_id}-mesh-positions-array-{mesh_index}" count="{len(vertices)}" stride="3">
                            <param name="X" type="float"/>
                            <param name="Y" type="float"/>
                            <param name="Z" type="float"/>
                        </accessor>
                    </technique_common>
                </source>
''')
            if model.normals is not None:
                normals = model.normals[new_value_slice]
                self._dae.write(
                    f'''
                <source id="{model_file_id}-mesh-normals-{mesh_index}">
                    <float_array id="{model_file_id}-mesh-normals-array-{mesh_index}" count="{normals.size}">{plaintext_array(normals)}</float_array>
                    <technique_common>
                        <accessor source="#{model_file_id}-mesh-normals-array-{mesh_index}" count="{len(normals)}" stride="3">
                            <param name="X" type="float"/>
                            <param name="Y" type="float"/>
                            <param name="Z" type="float"/>
                        </accessor>
                    </technique_common>
                </source>
''')
            self._dae.write(
                f'''
                <source id="{model_file_id}-mesh-map-0-{mesh_index}">
                    <float_array id="{model_file_id}-mesh-map-0-array-{mesh_index}" count="{texture_vertices.size}">{plaintext_array(texture_vertices)}</float_array>
                    <technique_common>
                        <accessor source="#{model_file_id}-mesh-map-0-array-{mesh_index}" count="{len(texture_vertices)}" stride="2">
                            <param name="S" type="float"/>
                            <param name="T" type="float"/>
                        </accessor>
                    </technique_common>
                </source>
                <vertices id="{model_file_id}-mesh-vertices-{mesh_index}">
                    <input semantic="POSITION" source="#{model_file_id}-mesh-positions-{mesh_index}"/>
                </vertices>
                <triangles material="{material_name}" count="{len(faces)}">
                    <input semantic="VERTEX" source="#{model_file_id}-mesh-vertices-{mesh_index}" offset="0"/>
''')
            if model.normals is not None:
                self._dae.write(
                    f'''
                    <input semantic="NORMAL" source="#{model_file_id}-mesh-normals-{mesh_index}" offset="0"/>
''')
            self._dae.write(
                f'''
                    <input semantic="TEXCOORD" source="#{model_file_id}-mesh-map-0-{mesh_index}" offset="0" set="0"/>
                    <p>{plaintext_array(faces)}</p>
                </triangles>
            </mesh>
        </geometry>
''')
    if transformation_matrix is None:
        # default to the identity transform
        transformation_matrix = numpy.array([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]])
    # always add a scene node for this call, even when the geometry was written by an earlier call
    for geometry_id, model_name, material_name in self._models_exported[
            model_file_id]:
        self._library_visual_scenes.append(
            f'''
        <node id="{model_name}" name="{model_name}" type="NODE">
            <matrix sid="transform">{plaintext_array(transformation_matrix)}</matrix>
            <instance_geometry url="#{geometry_id}" name="{model_name}">
                <bind_material>
                    <technique_common>
                        <instance_material symbol="{material_name}" target="#{material_name}"/>
                    </technique_common>
                </bind_material>
            </instance_geometry>
        </node>
''')
def run(self, *_):
    """Build a file-reference hierarchy for every file in every forge file.

    For each file: records its name and type, which forge/datafile pairs
    contain it, the file ids it references (found by scanning its bytes for
    8-byte little-endian id patterns) and, inversely, which files reference
    it. The result is dumped to ``<dumpFolder>/ACU_hierarchy.json`` as
    ``{file_id_hex: [name, type, [[forge, datafile_hex, [refs...]]...], [referenced_by...]]}``.
    """
    dict_doc = {}
    # every known file id as an upper-case little-endian hex string, for fast membership tests
    file_list = {
        binascii.hexlify(struct.pack('<Q', file_id)).decode("utf-8").upper()
        for file_id in pyUbiForge.temp_files.list_light_dictionary
    }
    datafile_count = 0
    datafile_completed_count = 0
    for forge_file_name in pyUbiForge.forge_files:
        datafile_count += len(pyUbiForge.forge_files[forge_file_name].datafiles)
    for forge_file_name, forge_file_class in pyUbiForge.forge_files.items():
        for datafile_id, datafile_class in forge_file_class.datafiles.items():
            datafile_id_hex = binascii.hexlify(
                struct.pack('<Q', datafile_id)).decode("utf-8").upper()
            try:
                pyUbiForge.temp_files(datafile_id, forge_file_name, datafile_id)
            except Exception:
                # fix: was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt
                continue
            for file_id in datafile_class.files.keys():
                file_id_hex = binascii.hexlify(
                    struct.pack('<Q', file_id)).decode("utf-8").upper()
                temp_file = pyUbiForge.temp_files(file_id, forge_file_name, datafile_id)
                # fix: temp_files can return None (see the other plugins); skip instead of crashing
                if temp_file is None:
                    continue
                file_wrapper = temp_file.file
                file_wrapper.seek(9)
                file_type = file_wrapper.read_type()
                if file_id_hex not in dict_doc:
                    dict_doc[file_id_hex] = [temp_file.file_name, file_type, [], []]
                elif dict_doc[file_id_hex][0] is None:
                    # file was previously seen only as a reference target; fill in its identity
                    dict_doc[file_id_hex][0] = temp_file.file_name
                    dict_doc[file_id_hex][1] = file_type
                dict_doc[file_id_hex][2].append([forge_file_name, datafile_id_hex, []])
                # scan the remaining bytes for candidate 8-byte ids (5 payload bytes + 3 NUL)
                for potential_file_id in re.findall(
                        b'(?=(.{4}[^\x00]\x00{3}))',
                        file_wrapper.read_rest(),
                        flags=re.DOTALL):
                    potential_file_id_hex = binascii.hexlify(
                        potential_file_id).decode("utf-8").upper()
                    if potential_file_id_hex in file_list:
                        # we have found a valid file reference
                        if potential_file_id_hex not in dict_doc[file_id_hex][2][-1][2]:
                            dict_doc[file_id_hex][2][-1][2].append(potential_file_id_hex)
                        if potential_file_id_hex not in dict_doc:
                            dict_doc[potential_file_id_hex] = [None, None, [], []]
                        if file_id_hex not in dict_doc[potential_file_id_hex][3]:
                            dict_doc[potential_file_id_hex][3].append(file_id_hex)
            datafile_completed_count += 1
            logging.info(
                f"Processed {round(100*datafile_completed_count/datafile_count, 2)}% of {datafile_count} datafiles"
            )
    logging.info("Processed all files")
    with open(
            f"{pyUbiForge.CONFIG.get('dumpFolder', 'output')}/ACU_hierarchy.json",
            'w') as f:
        json.dump(dict_doc, f, indent=4)