def check_file_type(fp):
    """Classify *fp* as Unity 'bundle', bare 'asset', or unknown 'raw' data.

    fp may be a filesystem path (str), in-memory bytes/bytearray, or an
    already-open binary file object. Returns one of the strings 'bundle',
    'asset' or 'raw'. A handle is opened (and closed) only for str input;
    caller-supplied file objects are left open.
    """
    if isinstance(fp, str):
        f = open(fp, 'rb')
    elif isinstance(fp, (bytes, bytearray)):
        from io import BytesIO
        f = BytesIO(fp)
    else:
        f = fp
    ret = 'raw'
    try:
        # Sniff the magic at the start of the stream, then rewind so the
        # stream is still usable by the caller / the asset parser.
        magic = bytearray(f.read(12))
        f.seek(0)

        def has_magic(text):
            return magic[:len(text)] == bytearray(text.encode())

        if (has_magic("UnityFS") or has_magic("UnityWeb")
                or has_magic("UnityRaw") or has_magic("UnityArchive")):
            ret = 'bundle'
        else:
            # Not a bundle: try parsing it as a bare serialized asset.
            from unitypack.asset import Asset
            asset = Asset.from_file(f)
            # Force the object table to parse; a failure leaves ret == 'raw'.
            for _id, _obj in asset.objects.items():
                pass
            ret = 'asset'
    except Exception:
        # Best-effort sniffing: any parse/import failure means "raw".
        pass
    finally:
        # Only close handles we opened ourselves (path input).
        if isinstance(fp, str):
            f.close()
    return ret
def main(filename, outpath):
    """Render the first TerrainData height map found in *filename* as a
    grayscale PNG written to *outpath*.

    Fix: the input file was opened but never closed; it is now managed
    with a context manager (all asset reads happen while it is open,
    since unitypack may read lazily).
    """
    with open(filename, 'rb') as f:
        tabledata = Asset.from_file(f)
        # find the terrain height map
        indx = 0
        for k, v in tabledata.objects.items():
            if v.type == 'TerrainData':
                indx = k
                break
        terrainData = tabledata.objects[indx].read()
        ofs = terrainData['m_Heightmap'].getmemboffset('m_Heights')
        print('TerrainData object index is {}, offset in binary is {}'.format(
            indx, hex(ofs)))
        # Map each height sample to an 8-bit gray value (scaled by the
        # module-level MAX_HEIGHT).
        pixels = []
        for height in terrainData['m_Heightmap']['m_Heights']:
            pix = int((height * 255) / MAX_HEIGHT)
            pixels.append((pix, pix, pix))
    img = Image.new("RGB", (terrainData['m_Heightmap']['m_Width'],
                            terrainData['m_Heightmap']['m_Height']))
    img.putdata(pixels)
    img.save(outpath)
    print('done.')
def main(f, name, base=None):
    """Find the container entry called *name* in asset file *f* and dump
    the game-object tree rooted there (via the module-level `fun`).

    base, if given, overrides the environment base path used to resolve
    external references.

    Fixes: the game object was parsed twice (`read()` called twice);
    unused local `depth` removed; "texutres" typo in the summary message.
    """
    asset = Asset.from_file(f)
    if base is not None:
        asset.environment.base_path = base
    container = asset.objects[1].read()['m_Container']
    for path, mtdt in container:
        if path == name:
            gameobject = mtdt['asset'].object
            body = gameobject.read()  # parse once, reuse below
            print(gameobject.type, body.name)
            seen.append(gameobject)
            fun(body, 1)
            # commented out for the Windows users' benefit
            #i = 0
            #for mesh in meshes:
            #    with open('/tmp/mesh{}.obj'.format(i), 'w') as f:
            #        f.write(OBJMesh(mesh).export())
            #    i += 1
            print('\n{} meshes\n{} textures\n{} materials'.format(
                meshes, textures, materials))
            return
    print('didn\'t find', name)
def main(f):
    """Print one tab-separated line per container entry:
    path_id, file_id, object type, and container path."""
    asset = Asset.from_file(f)
    entries = asset.objects[1].read()['m_Container']
    for entry_path, meta in entries:
        ref = meta['asset']
        print('{}\t{}\t{}\t{}'.format(
            ref.path_id, ref.file_id, ref.object.type, entry_path))
def main(f):
    """Print one tab-separated line per container entry:
    path_id, preloadIndex, preloadSize, and container path."""
    asset = Asset.from_file(f)
    for entry_path, meta in asset.objects[1].read()['m_Container']:
        print('{}\t{}\t{}\t{}'.format(
            meta['asset'].path_id,
            meta['preloadIndex'],
            meta['preloadSize'],
            entry_path))
def main(f):
    """List every object in the asset file *f*: one tab-separated line of
    object id, type_id, type name, and (when present) the object's name.

    Fixes: each object was parsed twice (`obj.read()` for the hasattr
    check and again for `.name`) -- now parsed once; loop variable no
    longer shadows the `id` builtin.
    """
    asset = Asset.from_file(f)
    for obj_id, obj in asset.objects.items():
        data = obj.read()
        # getattr with default replaces the hasattr-then-read pattern.
        name = getattr(data, 'name', '')
        print('{}\t{}\t{}\t{}'.format(obj_id, obj.type_id, obj.type, name))
def extract(self, lang='eng'):
    """Actually run the extraction process.

    Loads the item-name localization TextAsset from resources.assets,
    then parses the item data files (*.dat, extracted manually from
    resources.assets) found in self.items_data_dir.

    :param lang: localization language code passed to _parse_localization.
    :return: OrderedDict mapping item id -> parsed item dict (items with
        an empty name are skipped).
    :raises FileNotFoundError: if resources.assets is missing.
    """
    resources_file = os.path.join(self.data_dir, 'resources.assets')
    if not os.path.isfile(resources_file):
        # Fixed grammar in the user-facing error message ("does not exists").
        raise FileNotFoundError(resources_file + ' does not exist')
    # --------------------------------------------------------
    # STEP 1 - Parse and load the item names localization file
    with open(resources_file, 'rb') as f:
        asset = Asset.from_file(f)
        for id, obj in asset.objects.items():
            if obj.type != 'TextAsset':  # We only want text objects
                continue
            d = obj.read()
            if d.name == 'LocalizationItems':
                self._parse_localization(d.script, lang=lang)
                break
    # Loop through each objects in the resources file to find the items
    # data config files. If found, parse them.
    # for id, obj in asset.objects.items():
    #     if obj.type_id > 0:  # Ignore known object types
    #         continue
    #     d = obj.read()
    #     if isinstance(d, dict) and 'm_Name' in d and d['m_Name'].endswith('_ItemData'):
    #         print(d)  # TODO
    #         break
    #     if not d.name.startswith('ITM_') and obj.type != 'Texture2D':
    #         continue
    #     image = ImageOps.flip(d.image)
    #     image.save(d.name + '.png')
    # --------------------------------------------------------
    # STEP 2 - Parse the items data files (they must be extracted manually
    # from the resources.assets file)
    items = OrderedDict()
    for file in glob(os.path.join(self.items_data_dir, '*.dat')):
        with open(file, 'rb') as buf:
            item_id, item = self._parse_item_data_file(BinaryReader(buf))
            if not item['name']:
                continue
            items[item_id] = item
    return items
def main():
    """Insert the ITEM_NAME cosmetic: patch the table data, the character
    texture atlas and the icon atlas, writing each modified asset next to
    its original with a '_new' suffix."""
    print('inserting {}...'.format(ITEM_NAME))

    # Step 1: register the item in the table data and get its icon slot.
    print('modding TableData...')
    with open(TABLEDATA_PATH, 'rb') as src:
        table_asset = Asset.from_file(src)
        icon_number = mod_tabledata(table_asset)
        with open(TABLEDATA_PATH + '_new', 'wb') as dst:
            table_asset.save(dst)

    # Load the replacement images and derive the icon's name/path.
    body_texture = Image(filename=ITEM_TEXTURE_PATH)
    icon_image = Image(filename=ITEM_ICON_PATH)
    icon_name = 'cosicon_{}'.format(icon_number)
    icon_path = 'icons/{}.png'.format(icon_name)
    print('icon_name: {}, icon_path: {}'.format(icon_name, icon_path))

    # Step 2: splice the body texture into the character-texture asset.
    print('modding CharTexture...')
    with open(CHARTEX_PATH, 'rb') as src:
        chartex_asset = Asset.from_file(src)
        mod_texture(chartex_asset, body_texture,
                    'texture/' + ITEM_TEXTURE_NAME + '.dds',
                    ITEM_TEXTURE_NAME, TEMPL_TEXTURE_PATHID)
        with open(CHARTEX_PATH + '_new', 'wb') as dst:
            chartex_asset.save(dst)

    # Step 3: splice the icon into the icon atlas (DXT5-compressed).
    print('modding Icons...')
    with open(ICONS_PATH, 'rb') as src:
        icons_asset = Asset.from_file(src)
        mod_texture(icons_asset, icon_image, icon_path, icon_name,
                    TEMPL_ICON_PATHID, 'dxt5')
        with open(ICONS_PATH + '_new', 'wb') as dst:
            icons_asset.save(dst)

    print('done.')
def run(self):
    """Process every input file: bare .assets files (or anything when
    --as-asset is set) are handled directly, everything else is loaded
    as a bundle and each contained asset is handled in turn."""
    for path in self.args.files:
        treat_as_asset = self.args.as_asset or path.endswith(".assets")
        with open(path, "rb") as handle:
            if treat_as_asset:
                self.handle_asset(Asset.from_file(handle))
            else:
                bundle = unitypack.load(handle)
                for asset in bundle.assets:
                    self.handle_asset(asset)
    return 0
def main(f, outdir="/tmp/out/"):
    """Extract every Mesh object in asset file *f* to OBJ files under
    *outdir* (default keeps the original hard-coded /tmp/out/ path).

    Assumes Mesh objects are stored contiguously: after the first Mesh has
    been seen, the next non-Mesh object ends the scan.

    Fix: `obj.read()` was called for every object before the type check,
    fully parsing objects that were about to be skipped; the read now
    happens only for Mesh objects.
    """
    asset = Asset.from_file(f)
    wasmesh = False
    for obj_id, obj in asset.objects.items():
        if obj.type != 'Mesh':
            if wasmesh:
                break
            print('skipping', obj.type)
            continue
        wasmesh = True
        data = obj.read()
        print('processing', obj_id, data.name)
        mesh = extract_obj(data)
        with open(outdir + data.name + ".obj", "w") as outf:
            write_obj(data, mesh, outf)
def altmain(f, ids=None):
    """Extract specific objects (by numeric object id) from asset file *f*
    as OBJ files in /tmp, printing mesh statistics for each.

    :param ids: iterable of object ids; defaults to the integers given on
        the command line (sys.argv[2:]), preserving the original behavior.

    Fix: the function read sys.argv unconditionally; the ids are now an
    optional parameter so it can be driven programmatically as well.
    """
    asset = Asset.from_file(f)
    if ids is None:
        ids = [int(x) for x in sys.argv[2:]]
    for obj_id in ids:
        data = asset.objects[obj_id].read()
        print('processing', obj_id, data.name)
        mesh = extract_obj(data)
        print("# of triangles:", len(mesh.triangles))
        print("# of vertices:", len(mesh.vertices))
        #print("last vertex:", max(mesh.triangles))
        print("# of normals:", len(mesh.normals))
        print("# of uv1s:", len(mesh.uv1))
        print("# of uv2s:", len(mesh.uv2))
        with open("/tmp/" + data.name + ".obj", "w") as outf:
            write_obj(data, mesh, outf)
def main(f, name, base=None):
    """Emit a Graphviz digraph (to the module-level `outf`) of the object
    hierarchy rooted at the container entry called *name*.

    base, if given, overrides the environment base path used to resolve
    external references. Progress/summary messages go to stderr.

    Fixes: unused local `depth` removed; "texutres" typo in the summary
    message corrected.
    """
    asset = Asset.from_file(f)
    if base is not None:
        asset.environment.base_path = base
    print('''digraph {
graph [fontname=Arial, nodesep=0.125, ranksep=0.25];
node [fontcolor=white, fontname=Arial, height=0, shape=box, style=filled, width=0];
edge [fontname=Arial];
''', file=outf)
    container = asset.objects[1].read()['m_Container']
    for path, mtdt in container:
        if path == name:
            gameobject = mtdt['asset'].object
            body = gameobject.read()
            # Qualified node id: "<asset name>#<path id>" keeps nodes unique
            # across files.
            qid = f'{asset.name}#{gameobject.path_id}'
            seen.append(qid)
            print(
                f'\t"{qid}" [label="{gameobject.type} {0} {gameobject.path_id} {body.name}", color="{getcolor(gameobject.type)}"];',
                file=outf)
            handle_objbody(body, 1, qid)
            # uncomment (and change path if on Windows) to auto-extract all encountered meshes
            #i = 0
            #for mesh in meshes:
            #    with open('/tmp/mesh{}.obj'.format(i), 'w') as f:
            #        f.write(OBJMesh(mesh).export())
            #    i += 1
            print('}', file=outf)
            print('\n{} meshes\n{} textures\n{} materials'.format(
                meshes, textures, materials), file=sys.stderr)
            return
    print("didn't find", name, file=sys.stderr)
def main():
    """CLI entry point: dump asset files as YAML. With --strip, bulky
    payloads (meshes, movies, shaders, text, textures) are replaced by
    stub representers."""
    parser = ArgumentParser()
    parser.add_argument("files", nargs="+")
    parser.add_argument("-s", "--strip", action="store_true", help="Strip extractable data")
    args = parser.parse_args(sys.argv[1:])

    # Register the generic representer for every engine object class.
    for candidate in unitypack.engine.__dict__.values():
        is_engine_class = (isinstance(candidate, type)
                           and issubclass(candidate, unitypack.engine.object.Object))
        if is_engine_class:
            yaml.add_representer(candidate, unityobj_representer)

    if args.strip:
        # Specialized representers win over the generic one for these types.
        stripped_types = (
            (unitypack.engine.mesh.Mesh, mesh_representer),
            (unitypack.engine.movie.MovieTexture, movietexture_representer),
            (unitypack.engine.text.Shader, shader_representer),
            (unitypack.engine.text.TextAsset, textasset_representer),
            (unitypack.engine.texture.Texture2D, texture2d_representer),
        )
        for cls, representer in stripped_types:
            yaml.add_representer(cls, representer)

    for path in args.files:
        with open(path, "rb") as handle:
            if path.endswith(".assets"):
                handle_asset(Asset.from_file(handle))
            else:
                for asset in unitypack.load(handle).assets:
                    handle_asset(asset)
import sys
import os

from unitypack.asset import Asset


def main():
    """Dump every object's raw byte span from an .assets file into
    outputFolder/<id>.dat so the dumps can be diffed between versions."""
    if len(sys.argv) < 3:
        print("This tool dumps assets from asset files for use in diffing\nUsage: "
              + sys.argv[0]
              + " assetfile.assets outputFolder\nWill extract all assets from the input file and write them to outputFolder/####.dat")
        # sys.exit() instead of exit(): the `exit` builtin comes from the
        # site module and is absent under `python -S`.
        sys.exit()
    with open(sys.argv[1], "rb") as assets_file:
        asset = Asset.from_file(assets_file)
        for obj_id, obj in asset.objects.items():
            # Copy each object's raw bytes straight out of the source file
            # using its recorded offset and size (no parsing).
            assets_file.seek(obj.data_offset)
            data = assets_file.read(obj.size)
            with open(os.path.join(sys.argv[2], str(obj_id) + ".dat"), "wb") as outfile:
                outfile.write(data)


if __name__ == "__main__":
    main()
def run(self):
    """Main driver: expand the input file list, then process each file as
    either a bare asset or a bundle, optionally producing per-file JSON,
    an art-dump summary, or a dependency summary.

    NOTE(review): reconstructed from whitespace-collapsed source; the
    nesting of some trailing blocks (e.g. the dry-run JSON writes relative
    to the `with open(...)` blocks) is a best-effort reading -- confirm
    against the original file.
    """
    files = self.args.files
    # Expand the CLI file list with every discoverable input kind.
    self.populate_files_with_resource_assets(files)
    self.populate_files_with_asset_levels(files)
    self.populate_files_with_assets(files)
    self.populate_files_with_asset_bundles(files)
    # initialize object data for art_dump_summary
    if self.args.art_dump_summary:
        self.art_dump_textures_summary = {}
        self.art_dump_meshes_summary = {}
        self.art_dump_animation_clips_summary = {}
        self.art_dump_shaders_summary = {}
        self.art_dump_textures_size = 0
        self.art_dump_meshes_size = 0
        self.art_dump_animation_clips_size = 0
        self.art_dump_shaders_size = 0
    elif self.args.dep_summary:
        self.dependency_db = AssetDependencyDatabase()
    for file in files:
        # dep_summary logs with newlines; other modes append "Done" later.
        if self.args.dep_summary:
            print("Processing " + file + "...", flush=True)
        else:
            print("Processing " + file + "...", end='', flush=True)
        # reset the write_json_data flag during art_dump. It is flipped when
        # there are art assets found
        if self.args.art_dump:
            self.write_json_data = False
        if self.args.art_dump_summary:
            # Summary mode reads the previously written per-file JSON (text
            # mode open), not the binary asset itself.
            with open(file, "r") as f:
                self.handle_file_for_art_dump_summary(file, f)
            print("Done")
            continue
        # reset the json_data dict working memory on each file
        self.json_data = {}
        self.write_json_data = True
        # Bare-asset path: explicit flag, .assets extension, level files,
        # or one of the built-in asset names.
        if self.args.as_asset or file.endswith(".assets") or file.find("\\level") >= 0 or self.find_built_in_assets_index(file) >= 0:
            with open(file, "rb") as f:
                asset = Asset.from_file(f)
                self.json_data['Path'] = file
                # setup the ArtDump dictionary for handle_asset_for_art_dump
                if self.args.art_dump:
                    self.json_data['ArtDump'] = {}
                elif self.args.dep_summary:
                    # dep_summary only records dependencies; skip JSON output.
                    self.dependency_db.add_asset(file, asset)
                    continue
                if self.args.art_dump:
                    self.handle_asset_for_art_dump(file, asset)
                else:
                    self.handle_asset(file, asset)
            if not self.args.dry_run:
                json_path = file + ".json"
                if self.write_json_data:
                    with open(json_path, "w") as json_file:
                        json_file.write(json.dumps(self.json_data, indent=4, default=json_default))
                else:
                    print("Not writing JSON...", end='')
            print("Done")
            continue
        # Bundle path: everything that was not handled above.
        with open(file, "rb") as f:
            bundle = unitypack.load(f)
            self.json_data['Path'] = bundle.path
            self.json_data['GeneratorVersion'] = bundle.generator_version
            self.json_data['CompressionType'] = str(bundle.compression_type).replace(str("CompressionType."), "")
            self.json_data['FileSize'] = bundle.file_size
            self.json_data['BlockStorageFileOffset'] = bundle.block_storage_file_offset
            # setup the ArtDump dictionary for handle_asset_for_art_dump
            if self.args.art_dump:
                self.json_data['ArtDump'] = {}
            elif self.args.dep_summary:
                # dep_summary only records dependencies; skip JSON output.
                self.dependency_db.build_from_bundle(file, bundle)
                continue
            for asset in bundle.assets:
                if self.args.art_dump:
                    self.handle_asset_for_art_dump(file, asset)
                else:
                    self.handle_asset(file, asset)
        if not self.args.dry_run:
            json_path = file + ".json"
            if self.write_json_data:
                # NOTE(review): unlike the bare-asset branch above, this dump
                # passes no default= serializer -- presumably bundle JSON is
                # already plain types; confirm.
                with open(json_path, "w") as json_file:
                    json_file.write(json.dumps(self.json_data, indent=4))
            else:
                print("Not writing JSON...", end='')
        print("Done")
    # Post-pass outputs: one summary file per mode.
    if self.args.art_dump_summary:
        print("Writing art dump summary " + "" + "...", end='')
        self.write_art_dump_summary()
        print("Done")
    elif self.args.dep_summary:
        if self.dependency_db is not None:
            summary_path = self.get_output_path("dependency_summary" + ".json")
            print("Writing dependency summary " + "" + "...", flush=True)
            self.dependency_db.write_to_json_file(summary_path)
            print("Done")
    return 0
from unitypack.asset import Asset
from unitypack.object import FFOrderedDict

# Serialized table-data asset, and the object index of the XDT table in it.
TABLEDATA_PATH = 'CustomAssetBundle-1dca92eecee4742d985b799d8226666d'
XDTINDEX = 7

# NOTE(review): handle opened at module level and never closed in this view;
# unitypack may read from it lazily, so it must stay open while the tables
# below are accessed.
f = open(TABLEDATA_PATH, 'rb')
tabledata = Asset.from_file(f)
xdtdata = tabledata.objects[XDTINDEX].contents

# Mission and quest-item sub-tables of the XDT data blob.
missionTable = xdtdata['m_pMissionTable']
questItemTable = xdtdata['m_pQuestItemTable']

missionData = missionTable['m_pMissionData']
missionStringData = missionTable['m_pMissionStringData']
journalData = missionTable['m_pJournalData']
rewardData = missionTable['m_pRewardData']

questItemData = questItemTable['m_pItemData']
questItemStringData = questItemTable['m_pItemStringData']


# Use this to manually create and store quest items
def createQuestItem(name):
    # NOTE(review): this function appears truncated at the end of this chunk:
    # `index` is computed but unused and nothing is stored back into
    # questItemData/questItemStringData here -- confirm against the full file.
    index = len(questItemData)
    # Clone the first string entry as a template, then override the
    # name/comment fields with the new item's name.
    itemString = FFOrderedDict(0)
    for k, v in questItemStringData[0].items():
        itemString[k] = v
    itemString['m_strName'] = name
    itemString['m_strComment'] = name
import sys
from io import BytesIO

import unitypack
from unitypack.asset import Asset
from unitypack.object import FFOrderedDict
from unitypack.utils import BinaryWriter
from unitypack.modding import import_mesh

# Patch a mesh inside the CharacterSelection asset bundle with geometry
# imported from an OBJ file, writing the result to a sibling "_new" file.

# CharacterSelection asset bundle
f = open('CustomAssetBundle-ce09c4c9be8a046ca92e0044f22d1b99', 'rb')
outf = open('CustomAssetBundle-ce09c4c9be8a046ca92e0044f22d1b99_new', 'wb')

#asset = unitypack.load(f).assets[0]
asset = Asset.from_file(f)

#mesh = asset.objects[17].contents  # Eddy Nano in FutureNano (doesn't work yet)
mesh = asset.objects[12316].contents  # Fish Backpack in CharacterSelecion
#mesh = asset.objects[711].contents  # Terrafuser engine 01 in Tutorial

# Replace the mesh's geometry with the contents of the OBJ file.
import_mesh(mesh, 'nano_davestrider8.obj')
print(
    f'{len(mesh.vertices)} vertices, {len(mesh.normals)} normals, {len(mesh.uvs)} UVs, {len(mesh.index_buffer)//2} indices, {len(mesh.index_buffer)/6} triangles'
)

asset.save(outf)

# Fix: close both handles explicitly (they were leaked) so the rewritten
# bundle is fully flushed to disk. The input stays open until after save()
# because unitypack may read from it lazily.
outf.close()
f.close()
print('done.')