def testpatch2():
    with open(f'{EXTRACT_ROOT_PATH}/DATA/files/Stage/D000/D000_stg_l0.arc.LZ', 'rb') as f:
        extracted_data = nlzss11.decompress(f.read())
    stagearc = U8File.parse_u8(BytesIO(extracted_data))
    roomarc = U8File.parse_u8(BytesIO(stagearc.get_file_data('rarc/D000_r00.arc')))
    room = parseBzs(roomarc.get_file_data('dat/room.bzs'))
    objects = room['LAY ']['l0']['OBJS']
    # find chest with id 68 and replace the content with hookshot
    for obj in objects:
        if obj['name'] == b'TBox\x00\x00\x00\x00':
            if (obj['unk4'] & 0xFE00) >> 9 == 68:
                obj['posy'] = obj['posy'] + 50
                obj['unk4'] = (obj['unk4'] & 0xFE00) + 0x14
                print('patched hookshot')
            if (obj['unk4'] & 0xFE00) >> 9 == 67:
                obj['posy'] = obj['posy'] + 50
                obj['unk4'] = (obj['unk4'] & 0xFE00) + 0x31
                print('patched gust bellows')
    roomarc.set_file_data('dat/room.bzs', buildBzs(room))
    stagearc.set_file_data('rarc/D000_r00.arc', roomarc.to_buffer())
    # add gust bellows and hookshot oarcs so they properly work
    with open('oarc/GetHookShot.arc', 'rb') as f:
        arc = f.read()
    stagearc.add_file_data('oarc/GetHookShot.arc', arc)
    with open('oarc/GetVacuum.arc', 'rb') as f:
        arc = f.read()
    stagearc.add_file_data('oarc/GetVacuum.arc', arc)
    with open(f'{MODIFIED_ROOT_PATH}/DATA/files/Stage/D000/D000_stg_l0.arc.LZ', 'wb') as f:
        f.write(nlzss11.compress(stagearc.to_buffer()))
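# Note on the TBox fields patched in testpatch2 above: bits 9-15 of 'unk4' hold the
# chest id and the low bits hold the item id. The helper below is a hypothetical
# sketch (not part of the original tooling) that applies the same masking.
def set_tbox_item(tbox_obj, itemid):
    """Replace the item in a TBox entry while keeping its chest id bits intact."""
    tbox_obj['unk4'] = (tbox_obj['unk4'] & 0xFE00) | (itemid & 0x1FF)

# usage sketch: chest 68 gets the hookshot (item id 0x14)
# if (obj['unk4'] & 0xFE00) >> 9 == 68:
#     set_tbox_item(obj, 0x14)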
def main() -> None:
    parser = argparse.ArgumentParser("nlzss11", description="Compress or decompress Nintendo LZSS11 data")
    parser.add_argument("-d", "--decompress", help="Decompress", action="store_true", default=False)
    parser.add_argument("-c", "--stdout", help="Write output to stdout", action="store_true", default=False)
    parser.add_argument("-l", "--level", help="Compression level (6-9)", type=int, choices=[6, 7, 8, 9], default=9)
    parser.add_argument("file")
    args = parser.parse_args()
    with open(args.file, "rb") as f:
        data = f.read()
    if args.decompress or args.file.endswith(".LZ"):
        result = nlzss11.decompress(data)
        name = args.file.replace(".LZ", "")
        if Path(name).exists():
            name += ".decomp"
    else:
        result = nlzss11.compress(data, level=args.level)
        name = args.file + ".LZ"
    if args.stdout:
        sys.stdout.buffer.write(result)
    else:
        with open(name, "wb") as f:
            f.write(result)
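# A minimal sketch of calling the nlzss11 module directly instead of going through
# the CLI above, using only the compress/decompress calls the rest of this code
# already relies on. The file name is a placeholder.
import nlzss11

with open('input.bin', 'rb') as f:
    raw = f.read()
compressed = nlzss11.compress(raw, level=9)
# round-trips losslessly; bytes() normalizes the bytes-like return values
assert bytes(nlzss11.decompress(compressed)) == raw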
def patch_faron():
    with get_stagepath('F100', 0).open('rb') as f:
        extracted_data = nlzss11.decompress(f.read())
    stagearc = U8File.parse_u8(BytesIO(extracted_data))
    # patch layers, force layer 1
    stagedef = parseBzs(stagearc.get_file_data('dat/stage.bzs'))
    stagedef['LYSE'] = [
        OrderedDict((('story_flag', -1), ('night', 0), ('layer', 1)))
    ]
    stagearc.set_file_data('dat/stage.bzs', buildBzs(stagedef))
    room0arc = U8File.parse_u8(BytesIO(stagearc.get_file_data('rarc/F100_r00.arc')))
    roomdef = parseBzs(room0arc.get_file_data('dat/room.bzs'))
    # grab the trial from layer 3 and put it on layer 0
    trial = next(
        filter(lambda x: x['name'] == 'WarpObj', roomdef['LAY ']['l3']['OBJ ']))
    trial_butterflies = next(
        filter(lambda x: x['name'] == 'InsctTg', roomdef['LAY ']['l3']['STAG']))
    # trial['posy'] += 100
    # fix object IDs of the trial
    trial['id'] = 0x02F2
    trial_butterflies['id'] = 0xFEF3
    roomdef['LAY ']['l3']['OBJ '].remove(trial)
    roomdef['LAY ']['l0']['OBJ '].append(trial)
    roomdef['LAY ']['l3']['STAG'].remove(trial_butterflies)
    roomdef['LAY ']['l0']['STAG'].append(trial_butterflies)
    roomdef['LAY ']['l0']['ARCN'].append('SirenEntrance')
    roomdef['LAY ']['l0']['ARCN'].append('PLSwordStick')
    roomdef['LAY ']['l0']['OBJN'].append('WarpObj')
    room0arc.set_file_data('dat/room.bzs', buildBzs(roomdef))
    roomdat = BytesIO()
    room0arc.writeto(roomdat)
    stagearc.set_file_data('rarc/F100_r00.arc', roomdat.getbuffer())
    # add the trial arc(s)
    with open('oarc/SirenEntrance.arc', 'rb') as f:
        arc = f.read()
    stagearc.add_file_data('oarc/SirenEntrance.arc', arc)
    with open('oarc/PLHarpPlay.arc', 'rb') as f:
        arc = f.read()
    stagearc.add_file_data('oarc/PLHarpPlay.arc', arc)
    with open('oarc/PLSwordStick.arc', 'rb') as f:
        arc = f.read()
    stagearc.add_file_data('oarc/PLSwordStick.arc', arc)
    stagedat = BytesIO()
    stagearc.writeto(stagedat)
    with get_stagepath('F100', 0, rootpath=MODIFIED_ROOT_PATH).open('wb') as f:
        f.write(nlzss11.compress(stagedat.getbuffer()))
def upgrade_test():
    # patch stage
    with get_stagepath('D000', 0).open('rb') as f:
        extracted_data = nlzss11.decompress(f.read())
    stagearc = U8File.parse_u8(BytesIO(extracted_data))
    stagedef = parseBzs(stagearc.get_file_data('dat/stage.bzs'))
    room0arc = U8File.parse_u8(BytesIO(stagearc.get_file_data('rarc/D000_r00.arc')))
    roomdef = parseBzs(room0arc.get_file_data('dat/room.bzs'))
    # get chest
    chest = next(
        filter(lambda x: x['name'] == 'TBox', roomdef['LAY ']['l0']['OBJS']))
    chest['anglez'] = (chest['anglez'] & ~0x1FF) | 53  # Beetle
    room0arc.set_file_data('dat/room.bzs', buildBzs(roomdef))
    # add both beetle models
    with open('oarc/GetBeetleA.arc', 'rb') as h:
        stagearc.add_file_data('oarc/GetBeetleA.arc', h.read())
    with open('oarc/GetBeetleB.arc', 'rb') as h:
        stagearc.add_file_data('oarc/GetBeetleB.arc', h.read())
    stagearc.set_file_data('rarc/D000_r00.arc', room0arc.to_buffer())
    # write back
    with get_stagepath('D000', 0, rootpath=MODIFIED_ROOT_PATH).open('wb') as f:
        f.write(nlzss11.compress(stagearc.to_buffer()))
    # patch get item event
    with open(
            Path(__file__).parent / EXTRACT_ROOT_PATH / 'DATA' / 'files' / 'EU' /
            'Object' / 'en_GB' / '0-Common.arc', 'rb') as f:
        evntarc = U8File.parse_u8(BytesIO(f.read()))
    itemmsbf = parseMSB(evntarc.get_file_data('0-Common/003-ItemGet.msbf'))
    evnt = itemmsbf['FLW3']['flow'][422]  # event triggered after beetle text box
    evnt['type'] = 'type3'
    evnt['subType'] = 0
    evnt['param1'] = 0
    evnt['param3'] = 9
    evnt['param2'] = 75  # Hook Beetle
    evntarc.set_file_data('0-Common/003-ItemGet.msbf', buildMSB(itemmsbf))
    with open(
            Path(__file__).parent / MODIFIED_ROOT_PATH / 'DATA' / 'files' / 'EU' /
            'Object' / 'en_GB' / '0-Common.arc', 'wb') as f:
        f.write(evntarc.to_buffer())
def do_patch(self):
    self.modified_extract_path.mkdir(parents=True, exist_ok=True)
    # stages
    for stagepath in (self.actual_extract_path / 'DATA' / 'files' / 'Stage').glob('*/*_stg_l*.arc.LZ'):
        match = STAGE_REGEX.match(stagepath.parts[-1])
        stage = match[1]
        layer = int(match[2])
        self.progress_callback(f'patching {stage} l{layer}')
        modified_stagepath = self.modified_extract_path / 'DATA' / 'files' / 'Stage' / f'{stage}' / f'{stage}_stg_l{layer}.arc.LZ'
        modified = False
        # remove some arcs if necessary
        remove_arcs = set(self.stage_oarc_delete.get((stage, layer), []))
        # add additional arcs if needed
        additional_arcs = set(self.stage_oarc_add.get((stage, layer), []))
        if remove_arcs or additional_arcs or layer == 0:
            # only decompress and extract files, if needed
            stagedata = nlzss11.decompress(stagepath.read_bytes())
            stageu8 = U8File.parse_u8(BytesIO(stagedata))
            # remove arcs that are already added on layer 0
            if layer != 0:
                additional_arcs = additional_arcs - set(self.stage_oarc_add.get((stage, 0), []))
                remove_arcs = remove_arcs - additional_arcs
            for arc in remove_arcs:
                stageu8.delete_file(f'oarc/{arc}.arc')
                modified = True
            for arc in additional_arcs:
                oarc_bytes = (self.oarc_cache_path / f'{arc}.arc').read_bytes()
                stageu8.add_file_data(f'oarc/{arc}.arc', oarc_bytes)
                modified = True
            if layer == 0:
                stagebzs = parseBzs(stageu8.get_file_data('dat/stage.bzs'))
                # patch stage
                if self.bzs_patch:
                    newstagebzs = self.bzs_patch(stagebzs, stage, None)
                    if newstagebzs is not None:
                        stageu8.set_file_data('dat/stage.bzs', buildBzs(newstagebzs))
                        modified = True
                    # patch rooms
                    room_path_matches = (ROOM_REGEX.match(x) for x in stageu8.get_all_paths())
                    room_path_matches = (x for x in room_path_matches if x is not None)
                    for room_path_match in room_path_matches:
                        roomid = int(room_path_match.group('roomid'))
                        roomdata = stageu8.get_file_data(room_path_match.group(0))
                        roomarc = U8File.parse_u8(BytesIO(roomdata))
                        roombzs = parseBzs(roomarc.get_file_data('dat/room.bzs'))
                        roombzs = self.bzs_patch(roombzs, stage, roomid)
                        if roombzs is not None:
                            roomarc.set_file_data('dat/room.bzs', buildBzs(roombzs))
                            stageu8.set_file_data(room_path_match.group(0), roomarc.to_buffer())
                            modified = True
        # repack u8 and compress it if modified
        if modified:
            stagedata = stageu8.to_buffer()
            write_bytes_create_dirs(modified_stagepath, nlzss11.compress(stagedata))
            # print(f'patched {stage} l{layer}')
        elif self.copy_unmodified:
            shutil.copy(stagepath, modified_stagepath)
            # print(f'copied {stage} l{layer}')
    # events and text
    eventrootpath = None
    modified_eventrootpath = None
    # check target language
    for path, lang in LANGUAGES.items():
        if (self.actual_extract_path / 'DATA' / 'files' / path).exists():
            eventrootpath = self.actual_extract_path / 'DATA' / 'files' / path / 'Object' / lang
            modified_eventrootpath = self.modified_extract_path / 'DATA' / 'files' / path / 'Object' / lang
    if eventrootpath is None:
        raise Exception('Event files not found')
    for eventpath in eventrootpath.glob('*.arc'):
        modified = False
        filename = eventpath.parts[-1]
        self.progress_callback(f'patching {filename}')
        modified_eventpath = modified_eventrootpath / filename
        eventarc = U8File.parse_u8(BytesIO(eventpath.read_bytes()))
        # make sure to handle text files first for labels
        for eventfilepath in sorted(eventarc.get_all_paths(), key=lambda x: x[-1], reverse=True):
            eventfilename = eventfilepath.split("/")[-1]
            if eventfilename.endswith('.msbf'):
                parsedMsb = parseMSB(eventarc.get_file_data(eventfilepath))
                if self.event_patch:
                    patchedMsb = self.event_patch(parsedMsb, eventfilename[:-5])
                    if patchedMsb:
                        eventarc.set_file_data(eventfilepath, buildMSB(patchedMsb))
                        modified = True
            elif eventfilename.endswith('.msbt'):
                parsedMsb = parseMSB(eventarc.get_file_data(eventfilepath))
                if self.event_text_patch:
                    patchedMsb = self.event_text_patch(parsedMsb, eventfilename[:-5])
                    if patchedMsb:
                        eventarc.set_file_data(eventfilepath, buildMSB(patchedMsb))
                        modified = True
        if modified:
            write_bytes_create_dirs(modified_eventpath, eventarc.to_buffer())
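# The sort in do_patch above orders event files by their last character in reverse,
# which makes '.msbt' text files come before '.msbf' flow files, matching the
# "handle text files first for labels" comment. A quick illustration with
# placeholder paths:
paths = ['0-Common/003-ItemGet.msbf', '0-Common/word.msbt']
print(sorted(paths, key=lambda x: x[-1], reverse=True))
# -> ['0-Common/word.msbt', '0-Common/003-ItemGet.msbf']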
def fix_layers():
    patcher = AllPatcher(
        actual_extract_path=Path(__file__).parent / 'actual-extract',
        modified_extract_path=Path(__file__).parent / 'modified-extract',
        oarc_cache_path=Path(__file__).parent / 'oarc',
        copy_unmodified=False)
    with open("patches.yaml") as f:
        patches = yaml.safe_load(f)
    with open("eventpatches.yaml") as f:
        eventpatches = yaml.safe_load(f)
    if not patcher.oarc_cache_path.exists():
        with open("extracts.yaml") as f:
            extracts = yaml.safe_load(f)
        patcher.create_oarc_cache(extracts)
    stageoarcs = defaultdict(set)
    for stage, stagepatches in patches.items():
        if stage == 'global':
            continue
        for patch in stagepatches:
            if patch['type'] == 'oarcadd':
                stageoarcs[(stage, patch['destlayer'])].add(patch['oarc'])
    # stageoarcs[('D000',0)].add('GetSwordA')
    for (stage, layer), oarcs in stageoarcs.items():
        patcher.add_stage_oarc(stage, layer, oarcs)

    def bzs_patch_func(bzs, stage, room):
        stagepatches = patches.get(stage, [])
        modified = False
        if room is None:
            layer_patches = list(filter(lambda x: x['type'] == 'layeroverride', stagepatches))
            if len(layer_patches) > 1:
                print(f"warning, multiple layer overrides for stage {stage}!")
            elif len(layer_patches) == 1:
                layer_override = [
                    OrderedDict(story_flag=x['story_flag'], night=x['night'], layer=x['layer'])
                    for x in layer_patches[0]['override']
                ]
                bzs['LYSE'] = layer_override
                modified = True
        next_id = highest_objid(bzs) + 1
        for objpatch in filter(lambda x: x['type'] == 'objpatch' and x.get('room', None) == room, stagepatches):
            id = objpatch['id']
            layer = objpatch.get('layer', None)
            # OBJ has a trailing whitespace, but that was too error-prone for the yaml, so just pad it here
            objtype = objpatch['objtype'].ljust(4)
            objs = [x for x in bzs['LAY '][f'l{layer}'][objtype] if x['id'] == id]
            if len(objs) != 1:
                print(f'Error finding object: {json.dumps(objpatch)}')
            else:
                obj = objs[0]
                for key, val in objpatch['patch'].items():
                    if key in obj:
                        obj[key] = val
                    else:
                        try_patch_obj(obj, key, val)
                modified = True
                print(f'modified object from {layer} in room {room} with id {objpatch["id"]:04X}')
                # print(obj)
        for objmove in filter(lambda x: x['type'] == 'objmove' and x.get('room', None) == room, stagepatches):
            id = objmove['id']
            layer = objmove.get('layer', None)
            destlayer = objmove['destlayer']
            objtype = objmove['objtype'].ljust(4)
            objs = [x for x in bzs['LAY '][f'l{layer}'][objtype] if x['id'] == id]
            if len(objs) != 1:
                print(f'Error finding object: {json.dumps(objmove)}')
            else:
                obj = objs[0]
                bzs['LAY '][f'l{layer}'][objtype].remove(obj)
                obj['id'] = (obj['id'] & ~0x3FF) | next_id
                next_id += 1
                if objtype not in bzs['LAY '][f'l{destlayer}']:
                    bzs['LAY '][f'l{destlayer}'][objtype] = []
                bzs['LAY '][f'l{destlayer}'][objtype].append(obj)
                objn = bzs['LAY '][f'l{destlayer}']['OBJN']
                if obj['name'] not in objn:
                    objn.append(obj['name'])
                modified = True
                print(f'moved object from {layer} to {destlayer} in room {room} with id {objmove["id"]:04X}')
                # print(obj)
        for objdelete in filter(lambda x: x['type'] == 'objdelete' and x.get('room', None) == room, stagepatches):
            id = objdelete['id']
            layer = objdelete.get('layer', None)
            objtype = objdelete['objtype'].ljust(4)
            objs = [x for x in bzs['LAY '][f'l{layer}'][objtype] if x['id'] == id]
            if len(objs) != 1:
                print(f'Error finding object: {json.dumps(objdelete)}')
            else:
                obj = objs[0]
                bzs['LAY '][f'l{layer}'][objtype].remove(obj)
                modified = True
                print(f'removed object from {layer} in room {room} with id {objdelete["id"]:04X}')
                # print(obj)
        for objadd in filter(lambda x: x['type'] == 'objadd' and x.get('room', None) == room, stagepatches):
            layer = objadd.get('layer', None)
            objtype = objadd['objtype'].ljust(4)
            obj = objadd['object']
            if objtype in ['SOBS', 'SOBJ', 'STAS', 'STAG', 'SNDT']:
                new_obj = OrderedDict(
                    params1=obj['params1'],
                    params2=obj['params2'],
                    posx=obj['posx'],
                    posy=obj['posy'],
                    posz=obj['posz'],
                    sizex=obj['sizex'],
                    sizey=obj['sizey'],
                    sizez=obj['sizez'],
                    anglex=obj['anglex'],
                    angley=obj['angley'],
                    anglez=obj['anglez'],
                    id=(obj['id'] & ~0x3FF) | next_id,
                    name=obj['name'],
                )
            elif objtype in ['OBJS', 'OBJ ', 'DOOR']:
                new_obj = OrderedDict(
                    params1=obj['params1'],
                    params2=obj['params2'],
                    posx=obj['posx'],
                    posy=obj['posy'],
                    posz=obj['posz'],
                    anglex=obj['anglex'],
                    angley=obj['angley'],
                    anglez=obj['anglez'],
                    id=(obj['id'] & ~0x3FF) | next_id,
                    name=obj['name'],
                )
            else:
                print(f'Error: unknown objtype: {objtype}')
                continue
            next_id += 1
            if objtype not in bzs['LAY '][f'l{layer}']:
                bzs['LAY '][f'l{layer}'][objtype] = []
            # add object name to objn
            objn = bzs['LAY '][f'l{layer}']['OBJN']
            if obj['name'] not in objn:
                objn.append(obj['name'])
            bzs['LAY '][f'l{layer}'][objtype].append(new_obj)
            modified = True
            print(f'added object {obj["name"]} to {layer} in room {room}')
            # print(obj)
        if stage == 'F405' and room == 0:
            # patch hero's tunic, sailcloth and goddess sword in opening CS
            bzs['EVNT'][0]['story_flag2'] = 36
            bzs['EVNT'][0]['item'] = 15
            modified = True
        elif stage == 'F001r' and room == 1:
            # put all storyflags in Link's room at the start
            if 'STAG' not in bzs['LAY ']['l0']:
                bzs['LAY ']['l0']['STAG'] = []
            for storyflag in patches['global'].get('startstoryflags', []):
                new_obj = OrderedDict(
                    params1=0xFFFFFFFF,
                    params2=0xFF5FFFFF,
                    posx=761,
                    posy=-22,
                    posz=-2260,
                    sizex=1000,
                    sizey=1000,
                    sizez=1000,
                    anglex=storyflag,
                    angley=0,
                    anglez=65535,
                    id=(0xFD84 & ~0x3FF) | next_id,
                    name="SwAreaT",
                )
                bzs['LAY ']['l0']['STAG'].append(new_obj)
                next_id += 1
            bzs['EVNT'][0]['item'] = 11
            bzs['EVNT'][0]['sceneflag1'] = 67
            bzs['EVNT'][0]['sceneflag2'] = 0
            modified = True
        elif stage == 'F300_5' and room == 0:
            # Add save statue to leave dungeon after boss room to prevent softlock
            # TODO: move to patches.yaml
            new_obj = OrderedDict(
                params1=0xFF0302FF,
                params2=0xFF5FFFFF,
                posx=836,
                posy=0,
                posz=305,
                anglex=0xFFFF,
                angley=0xBC30,
                anglez=0xFFFF,
                id=(0xFD84 & ~0x3FF) | next_id,
                name="saveObj",
            )
            next_id += 1
            if 'OBJS' not in bzs['LAY ']['l0']:
                bzs['LAY ']['l0']['OBJS'] = []
            bzs['LAY ']['l0']['OBJS'].append(new_obj)
            bzs['LAY ']['l0']['OBJN'].append('saveObj')
            assert len(bzs['SCEN']) == 3, "F300_5 room 0 should have 3 SCEN"
            new_scen = OrderedDict(
                name="F300",
                room=0,
                layer=0,
                entrance=5,
                byte4=2,
                byte5=2,
                flag6=0,
                zero=0,
                flag8=0,
            )
            bzs['SCEN'].append(new_scen)
            modified = True
        elif stage == 'F300_4' and room == 0:
            # make harp CS not give an item and change storyflag
            # TODO: move to patches.yaml
            bzs['EVNT'][20]['item'] = -1
            bzs['EVNT'][20]['story_flag1'] = 914
            modified = True
        if modified:
            # print(json.dumps(bzs))
            return bzs
        else:
            return None

    patcher.set_bzs_patch(bzs_patch_func)

    def flow_patch(msbf, filename):
        modified = False
        flowpatches = eventpatches.get(filename, [])
        for command in filter(lambda x: x['type'] == 'flowpatch', flowpatches):
            flowobj = msbf['FLW3']['flow'][command['index']]
            for key, val in command['patch'].items():
                flowobj[key] = val
            print(f'patched flow {command["index"]}, {filename}')
            modified = True
        for command in filter(lambda x: x['type'] == 'flowadd', flowpatches):
            assert len(msbf['FLW3']['flow']) == command['index'], \
                f'index has to be the next value in the flow, expected {len(msbf["FLW3"]["flow"])} got {command["index"]}'
            flowobj = OrderedDict(
                type='type1',
                subType=-1,
                param1=0,
                param2=0,
                next=-1,
                param3=0,
                param4=0,
                param5=0,
            )
            for key, val in command['flow'].items():
                flowobj[key] = val
            msbf['FLW3']['flow'].append(flowobj)
            print(f'added flow {command["index"]}, {filename}')
            modified = True
        if filename == '003-ItemGet':
            # make progressive mitts
            make_progressive_item(msbf, 93, [35, 231], [56, 99], [904, 905])
            # make progressive swords
            # TODO fix empty textboxes
            # TODO trainings and goddess sword both set storyflags on their own, could reuse those
            make_progressive_item(msbf, 136, [77, 608, 75, 78, 74, 73],
                                  [10, 11, 12, 9, 13, 14],
                                  [906, 907, 908, 909, 910, 911])
            # make progressive beetle
            make_progressive_item(msbf, 96, [38, 178], [53, 75], [912, 913])
            modified = True
        if modified:
            return msbf
        else:
            return None

    def text_patch(msbt, filename):
        modified = False
        textpatches = eventpatches.get(filename, [])
        for command in filter(lambda x: x['type'] == 'textpatch', textpatches):
            msbt['TXT2'][command['index']] = command['text'].encode('utf-16be')
            print(f'patched text {command["index"]}, {filename}')
            modified = True
        if modified:
            return msbt
        else:
            return None

    patcher.set_event_patch(flow_patch)
    patcher.set_event_text_patch(text_patch)
    patcher.do_patch()

    # patch main.dol
    orig_dol = bytearray((patcher.actual_extract_path / 'DATA' / 'sys' / 'main.dol').read_bytes())
    for dolpatch in patches['global'].get('asm', {}).get('main', []):
        actual_code = bytes.fromhex(dolpatch['original'])
        patched_code = bytes.fromhex(dolpatch['patched'])
        assert len(actual_code) == len(patched_code), "code length has to remain the same!"
        code_pos = orig_dol.find(actual_code)
        assert code_pos != -1, f"code {dolpatch['original']} not found in main.dol!"
        assert orig_dol.find(actual_code, code_pos + 1) == -1, \
            f"code {dolpatch['original']} found multiple times in main.dol!"
        orig_dol[code_pos:code_pos + len(actual_code)] = patched_code
    write_bytes_create_dirs(patcher.modified_extract_path / 'DATA' / 'sys' / 'main.dol', orig_dol)

    rel_arc = U8File.parse_u8(BytesIO((patcher.actual_extract_path / 'DATA' / 'files' / 'rels.arc').read_bytes()))
    rel_modified = False
    for file, codepatches in patches['global'].get('asm', {}).items():
        if file == 'main':  # main.dol
            continue
        rel = rel_arc.get_file_data(f'rels/{file}NP.rel')
        if rel is None:
            print(f'ERROR: rel {file} not found!')
            continue
        rel = bytearray(rel)
        for codepatch in codepatches:
            actual_code = bytes.fromhex(codepatch['original'])
            patched_code = bytes.fromhex(codepatch['patched'])
            assert len(actual_code) == len(patched_code), "code length has to remain the same!"
            code_pos = rel.find(actual_code)
            assert code_pos != -1, f"code {codepatch['original']} not found in {file}!"
            assert rel.find(actual_code, code_pos + 1) == -1, \
                f"code {codepatch['original']} found multiple times in {file}!"
            rel[code_pos:code_pos + len(actual_code)] = patched_code
        rel_arc.set_file_data(f'rels/{file}NP.rel', rel)
        rel_modified = True
    if rel_modified:
        rel_data = rel_arc.to_buffer()
        write_bytes_create_dirs(patcher.modified_extract_path / 'DATA' / 'files' / 'rels.arc', rel_data)

    # patch object pack
    objpack_data = nlzss11.decompress((patcher.actual_extract_path / 'DATA' / 'files' / 'Object' / 'ObjectPack.arc.LZ').read_bytes())
    object_arc = U8File.parse_u8(BytesIO(objpack_data))
    objpack_modified = False
    for oarc in patches['global'].get('objpackoarcadd', []):
        oarc_data = (patcher.oarc_cache_path / f'{oarc}.arc').read_bytes()
        object_arc.add_file_data(f'oarc/{oarc}.arc', oarc_data)
        objpack_modified = True
    if objpack_modified:
        objpack_data = object_arc.to_buffer()
        write_bytes_create_dirs(
            patcher.modified_extract_path / 'DATA' / 'files' / 'Object' / 'ObjectPack.arc.LZ',
            nlzss11.compress(objpack_data))
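# The main.dol and rel patching above finds a unique original byte sequence and
# overwrites it in place with a same-length replacement. A small self-contained
# sketch of that technique (the function name is hypothetical):
def apply_binary_patch(data: bytearray, original_hex: str, patched_hex: str) -> None:
    original = bytes.fromhex(original_hex)
    patched = bytes.fromhex(patched_hex)
    assert len(original) == len(patched), "code length has to remain the same!"
    pos = data.find(original)
    assert pos != -1, "original code not found!"
    assert data.find(original, pos + 1) == -1, "original code is not unique!"
    data[pos:pos + len(original)] = patched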
def do_patch(self):
    self.modified_extract_path.mkdir(parents=True, exist_ok=True)
    # stages
    for stagepath in (self.actual_extract_path / 'DATA' / 'files' / 'Stage').glob('*/*_stg_l*.arc.LZ'):
        match = STAGE_REGEX.match(stagepath.parts[-1])
        stage = match[1]
        layer = int(match[2])
        modified_stagepath = self.modified_extract_path / 'DATA' / 'files' / 'Stage' / f'{stage}' / f'{stage}_stg_l{layer}.arc.LZ'
        modified = False
        stagedata = nlzss11.decompress(stagepath.read_bytes())
        stageu8 = U8File.parse_u8(BytesIO(stagedata))
        # add additional arcs if needed
        for arc in self.stage_oarc_add.get((stage, layer), []):
            oarc_bytes = (self.oarc_cache_path / f'{arc}.arc').read_bytes()
            stageu8.add_file_data(f'oarc/{arc}.arc', oarc_bytes)
            modified = True
        if layer == 0:
            stagebzs = parseBzs(stageu8.get_file_data('dat/stage.bzs'))
            # patch stage
            if self.bzs_patch:
                newstagebzs = self.bzs_patch(stagebzs, stage, None)
                if newstagebzs is not None:
                    stageu8.set_file_data('dat/stage.bzs', buildBzs(newstagebzs))
                    modified = True
                # patch rooms
                for roomid in range(len(stagebzs.get('RMPL', [0]))):
                    roomdata = stageu8.get_file_data(f'rarc/{stage}_r{roomid:02}.arc')
                    if roomdata is None:
                        continue
                    roomarc = U8File.parse_u8(BytesIO(roomdata))
                    roombzs = parseBzs(roomarc.get_file_data('dat/room.bzs'))
                    roombzs = self.bzs_patch(roombzs, stage, roomid)
                    if roombzs is not None:
                        roomarc.set_file_data('dat/room.bzs', buildBzs(roombzs))
                        stageu8.set_file_data(f'rarc/{stage}_r{roomid:02}.arc', roomarc.to_buffer())
                        modified = True
        # repack u8 and compress it if modified
        if modified:
            stagedata = stageu8.to_buffer()
            write_bytes_create_dirs(modified_stagepath, nlzss11.compress(stagedata))
            print(f'patched {stage} l{layer}')
        elif self.copy_unmodified:
            shutil.copy(stagepath, modified_stagepath)
            print(f'copied {stage} l{layer}')
    # events and text
    eventrootpath = None
    modified_eventrootpath = None
    # check target language
    for path, lang in LANGUAGES.items():
        if (self.actual_extract_path / 'DATA' / 'files' / path).exists():
            eventrootpath = self.actual_extract_path / 'DATA' / 'files' / path / 'Object' / lang
            modified_eventrootpath = self.modified_extract_path / 'DATA' / 'files' / path / 'Object' / lang
    if eventrootpath is None:
        raise Exception('Event files not found')
    for eventpath in eventrootpath.glob('*.arc'):
        modified = False
        filename = eventpath.parts[-1]
        modified_eventpath = modified_eventrootpath / filename
        eventarc = U8File.parse_u8(BytesIO(eventpath.read_bytes()))
        for eventfilepath in eventarc.get_all_paths():
            eventfilename = eventfilepath.split("/")[-1]
            if eventfilename.endswith('.msbf'):
                parsedMsb = parseMSB(eventarc.get_file_data(eventfilepath))
                if self.event_patch:
                    patchedMsb = self.event_patch(parsedMsb, eventfilename[:-5])
                    if patchedMsb:
                        eventarc.set_file_data(eventfilepath, buildMSB(patchedMsb))
                        modified = True
            elif eventfilename.endswith('.msbt'):
                parsedMsb = parseMSB(eventarc.get_file_data(eventfilepath))
                if self.event_text_patch:
                    patchedMsb = self.event_text_patch(parsedMsb, eventfilename[:-5])
                    if patchedMsb:
                        eventarc.set_file_data(eventfilepath, buildMSB(patchedMsb))
                        modified = True
        if modified:
            write_bytes_create_dirs(modified_eventpath, eventarc.to_buffer())
            print(f'patched {filename}')
def do_patch(self):
    self.modified_extract_path.mkdir(parents=True, exist_ok=True)
    # set of all (stage, layer) combinations that need to be modified
    stages_layer_to_patch = set()
    stages_layer_to_patch.update(self.stage_oarc_add.keys())
    stages_layer_to_patch.update((stage, 0) for stage in self.stage_patches.keys())
    stages_layer_to_patch.update((stage, 0) for stage, room in self.room_patches.items())
    # stages
    for stagepath in (self.actual_extract_path / 'DATA' / 'files' / 'Stage').glob('*/*_stg_l*.arc.LZ'):
        match = STAGE_REGEX.match(stagepath.parts[-1])
        stage = match[1]
        layer = int(match[2])
        if self.keep_path:
            modified_stagepatch = self.modified_extract_path / 'DATA' / 'files' / 'Stage' / f'{stage}' / f'{stage}_stg_l{layer}.arc.LZ'
        else:
            modified_stagepatch = self.modified_extract_path / f'{stage}_stg_l{layer}.arc.LZ'
        if (stage, layer) not in stages_layer_to_patch:
            if self.copy_unmodified:
                modified_stagepatch.write_bytes(stagepath.read_bytes())
                print(f'copied {stage} l{layer}')
        else:
            stagedata = nlzss11.decompress(stagepath.read_bytes())
            stageu8 = U8File.parse_u8(BytesIO(stagedata))
            # add additional arcs if needed
            for arc in self.stage_oarc_add.get((stage, layer), []):
                oarc_bytes = (self.oarc_cache_path / f'{arc}.arc').read_bytes()
                stageu8.add_file_data(f'oarc/{arc}.arc', oarc_bytes)
            # patch stage bzs if needed
            if stage in self.stage_patches:
                stagebzs = parseBzs(stageu8.get_file_data('dat/stage.bzs'))
                stagebzs = self.stage_patches[stage](stagebzs)
                stageu8.set_file_data('dat/stage.bzs', buildBzs(stagebzs))
            # patch all rooms that are needed
            for roomid, patchfunc in self.room_patches[stage].items():
                roomarc = U8File.parse_u8(BytesIO(stageu8.get_file_data(f'rarc/{stage}_r{roomid:02}.arc')))
                roombzs = parseBzs(roomarc.get_file_data('dat/room.bzs'))
                roombzs = patchfunc(roombzs)
                roomarc.set_file_data('dat/room.bzs', buildBzs(roombzs))
                stageu8.set_file_data(f'rarc/{stage}_r{roomid:02}.arc', roomarc.to_buffer())
            # repack u8 and compress it
            stagedata = stageu8.to_buffer()
            modified_stagepatch.write_bytes(nlzss11.compress(stagedata))
            print(f'patched {stage} l{layer}')
    # events
    eventrootpath = None
    modified_eventrootpath = None
    # check target language
    for path, lang in LANGUAGES.items():
        if (self.actual_extract_path / 'DATA' / 'files' / path).exists():
            eventrootpath = self.actual_extract_path / 'DATA' / 'files' / path / 'Object' / lang
            if self.keep_path:
                modified_eventrootpath = self.modified_extract_path / 'DATA' / 'files' / path / 'Object' / lang
            else:
                modified_eventrootpath = self.modified_extract_path
            break
    if eventrootpath is None:
        raise Exception('Event files not found')
    # first letter determines which file to use
    needed_eventfiles = set(x[0] for x in self.event_patches.keys())
    for eventpath in eventrootpath.glob('*.arc'):
        filename = eventpath.parts[-1]
        match = EVENT_REGEX.match(filename)
        eventfilenum = match[1]
        modified_eventpath = modified_eventrootpath / filename
        if eventfilenum not in needed_eventfiles:
            if self.copy_unmodified:
                modified_eventpath.write_bytes(eventpath.read_bytes())
                print(f'copied {filename}')
        else:
            eventarc = U8File.parse_u8(BytesIO(eventpath.read_bytes()))
            for file, patchfunc in self.event_patches.items():
                if not str(eventfilenum) == file[0]:
                    # first letter determines which file to use
                    continue
                parsedMsb = parseMSB(eventarc.get_file_data(f'{filename[:-4]}/{file}'))
                parsedMsb = patchfunc(parsedMsb)
                eventarc.set_file_data(f'{filename[:-4]}/{file}', buildMSB(parsedMsb))
            modified_eventpath.write_bytes(eventarc.to_buffer())
            print(f'patched {filename}')
def do_gamepatches(rando):
    patcher = AllPatcher(actual_extract_path=rando.actual_extract_path,
                         modified_extract_path=rando.modified_extract_path,
                         oarc_cache_path=rando.oarc_cache_path,
                         copy_unmodified=False)
    with (RANDO_ROOT_PATH / "patches.yaml").open() as f:
        patches = yaml.safe_load(f)
    with (RANDO_ROOT_PATH / "eventpatches.yaml").open() as f:
        eventpatches = yaml.safe_load(f)

    rando.progress_callback('building arc cache...')

    with (RANDO_ROOT_PATH / "extracts.yaml").open() as f:
        extracts = yaml.safe_load(f)
    patcher.create_oarc_cache(extracts)

    def filter_option_requirement(entry):
        return not (isinstance(entry, dict) and 'onlyif' in entry
                    and not rando.logic.check_logical_expression_string_req(entry['onlyif']))

    filtered_storyflags = []
    for storyflag in patches['global']['startstoryflags']:
        # conditionals are an object
        if not isinstance(storyflag, int):
            if filter_option_requirement(storyflag):
                storyflag = storyflag['storyflag']
            else:
                continue
        filtered_storyflags.append(storyflag)

    # filter startstoryflags
    patches['global']['startstoryflags'] = filtered_storyflags

    # Add sword story/itemflags if required
    start_sword_count = rando.starting_items.count('Progressive Sword')
    for i in range(start_sword_count):
        patches['global']['startstoryflags'].append(PROGRESSIVE_SWORD_STORYFLAGS[i])
    if start_sword_count > 0:
        patches['global']['startitems'].append(PROGRESSIVE_SWORD_ITEMIDS[start_sword_count - 1])

    # if 'Sailcloth' in rando.starting_items:
    #     patches['global']['startstoryflags'].append(32)
    #     patches['global']['startitems'].append(15)

    rando_stagepatches, stageoarcs, rando_eventpatches = get_patches_from_location_item_list(
        rando.logic.item_locations, rando.logic.done_item_locations)

    # Add required dungeon patches to eventpatches
    DUNGEON_TO_EVENTFILE = {
        'Skyview': '201-ForestD1',
        'Earth Temple': '301-MountainD1',
        'Lanayru Mining Facility': '400-Desert',
        'Ancient Cistern': '202-ForestD2',
        'Sandship': '401-DesertD2',
        'Fire Sanctuary': '304-MountainD2',
    }
    REQUIRED_DUNGEON_STORYFLAGS = [902, 903, 926, 927, 928, 929]

    for i, dungeon in enumerate(rando.required_dungeons):
        dungeon_events = eventpatches[DUNGEON_TO_EVENTFILE[dungeon]]
        required_dungeon_storyflag_event = next(
            filter(lambda x: x['name'] == 'rando required dungeon storyflag', dungeon_events))
        # param2 is storyflag of event
        required_dungeon_storyflag_event['flow']['param2'] = REQUIRED_DUNGEON_STORYFLAGS[i]

    required_dungeon_count = len(rando.required_dungeons)
    # set flags for unrequired dungeons beforehand
    for required_dungeon_storyflag in REQUIRED_DUNGEON_STORYFLAGS[required_dungeon_count:]:
        patches['global']['startstoryflags'].append(required_dungeon_storyflag)

    # patch required dungeon text in
    if required_dungeon_count == 0:
        required_dungeons_text = 'No Dungeons'
    elif required_dungeon_count == 6:
        required_dungeons_text = 'All Dungeons'
    elif required_dungeon_count < 4:
        required_dungeons_text = 'Required Dungeons:\n' + ('\n'.join(rando.required_dungeons))
    else:
        required_dungeons_text = 'Required: ' + ', '.join(rando.required_dungeons)

        # try to fit the text in as few lines as possible, breaking up at spaces if necessary
        cur_line = ''
        combined = ''
        for part in required_dungeons_text.split(' '):
            if len(cur_line + part) > 27:  # limit of one line
                combined += cur_line + '\n'
                cur_line = part + ' '
            else:
                cur_line += part + ' '
        combined += cur_line
        required_dungeons_text = combined.strip()

    eventpatches['107-Kanban'].append({
        "name": "Knight Academy Billboard text",
        "type": "textpatch",
        "index": 18,
        "text": required_dungeons_text,
    })

    # Add storyflags for startitems (only tablets for now)
    for item in rando.starting_items:
        if item in START_ITEM_STORYFLAGS:
            patches['global']['startstoryflags'].append(START_ITEM_STORYFLAGS[item])

    # add startflags to eventpatches
    startstoryflags = patches['global'].get('startstoryflags', None)
    startsceneflags = patches['global'].get('startsceneflags', None)
    startitems = patches['global'].get('startitems', None)

    def pop_or_default(lst, default=-1):
        if len(lst) == 0:
            return default
        else:
            return lst.pop(0)

    for cs_stage, cs_room, cs_index in START_CUTSCENES:
        if cs_stage not in patches:
            patches[cs_stage] = []
        if cs_stage.startswith('F0'):
            # make sure to only set sceneflags on skyloft
            patches[cs_stage].append({
                'name': 'Startflags',
                'type': 'objpatch',
                'room': cs_room,
                'index': cs_index,
                'objtype': 'EVNT',
                'object': {
                    'item': pop_or_default(startitems),
                    'story_flag1': pop_or_default(startstoryflags),
                    'story_flag2': pop_or_default(startstoryflags),
                    'sceneflag1': pop_or_default(startsceneflags),
                    'sceneflag2': pop_or_default(startsceneflags),
                },
            })
        else:
            patches[cs_stage].append({
                'name': 'Startflags',
                'type': 'objpatch',
                'room': cs_room,
                'index': cs_index,
                'objtype': 'EVNT',
                'object': {
                    'item': pop_or_default(startitems),
                    'story_flag1': pop_or_default(startstoryflags),
                    'story_flag2': pop_or_default(startstoryflags),
                },
            })

    # for now, we can only set scene and storyflags here, so make sure all items were handled in the events
    assert len(startitems) == 0, "Not all items were handled in events!"

    while startsceneflags or startstoryflags:
        patches['F001r'].append({
            'name': 'Startflags',
            'type': 'objadd',
            'room': 1,  # Link's room
            'layer': 0,
            'objtype': 'STAG',
            'object': {
                "params1": 0xFFFFFF00 | (pop_or_default(startsceneflags) & 0xFF),
                "params2": 0xFF5FFFFF,
                "posx": 761,
                "posy": -22,
                "posz": -2260,
                "sizex": 1000,
                "sizey": 1000,
                "sizez": 1000,
                "anglex": pop_or_default(startstoryflags) & 0xFFFF,
                "angley": 0,
                "anglez": 65535,
                "name": "SwAreaT",
            }
        })

    remove_stageoarcs = defaultdict(set)

    for stage, stagepatches in patches.items():
        if stage == 'global':
            continue
        for patch in stagepatches:
            if patch['type'] == 'oarcadd':
                stageoarcs[(stage, patch['destlayer'])].add(patch['oarc'])
            elif patch['type'] == 'oarcdelete':
                remove_stageoarcs[(stage, patch['layer'])].add(patch['oarc'])

    # stageoarcs[('D000',0)].add('GetSwordA')

    for (stage, layer), oarcs in stageoarcs.items():
        patcher.add_stage_oarc(stage, layer, oarcs)
    for (stage, layer), oarcs in remove_stageoarcs.items():
        patcher.delete_stage_oarc(stage, layer, oarcs)

    def bzs_patch_func(bzs, stage, room):
        stagepatches = patches.get(stage, [])
        stagepatches = list(filter(filter_option_requirement, stagepatches))
        modified = False
        if room is None:
            layer_patches = list(filter(lambda x: x['type'] == 'layeroverride', stagepatches))
            if len(layer_patches) > 1:
                print(f"ERROR: multiple layer overrides for stage {stage}!")
            elif len(layer_patches) == 1:
                layer_override = [
                    OrderedDict(story_flag=x['story_flag'], night=x['night'], layer=x['layer'])
                    for x in layer_patches[0]['override']
                ]
                bzs['LYSE'] = layer_override
                modified = True
        next_id = highest_objid(bzs) + 1
        for objpatch in filter(
                lambda x: x['type'] == 'objpatch' and x.get('room', None) == room,
                stagepatches):
            obj = get_entry_from_bzs(bzs, objpatch)
            if obj is not None:
                for key, val in objpatch['object'].items():
                    if key in obj:
                        obj[key] = val
                    else:
                        try_patch_obj(obj, key, val)
                modified = True
                # print(f'modified object from {layer} in room {room} with id {objpatch["id"]:04X}')
                # print(obj)
        for objmove in filter(
                lambda x: x['type'] == 'objmove' and x.get('room', None) == room,
                stagepatches):
            obj = get_entry_from_bzs(bzs, objmove, remove=True)
            destlayer = objmove['destlayer']
            if obj is not None:
                layer = objmove['layer']
                objtype = objmove['objtype'].ljust(4)
                obj['id'] = (obj['id'] & ~0x3FF) | next_id
                next_id += 1
                if objtype not in bzs['LAY '][f'l{destlayer}']:
                    bzs['LAY '][f'l{destlayer}'][objtype] = []
                bzs['LAY '][f'l{destlayer}'][objtype].append(obj)
                objn = bzs['LAY '][f'l{destlayer}']['OBJN']
                if obj['name'] not in objn:
                    objn.append(obj['name'])
                modified = True
                # print(f'moved object from {layer} to {destlayer} in room {room} with id {objmove["id"]:04X}')
                # print(obj)
        for objdelete in filter(
                lambda x: x['type'] == 'objdelete' and x.get('room', None) == room,
                stagepatches):
            obj = get_entry_from_bzs(bzs, objdelete, remove=True)
            if obj is not None:
                modified = True
                # print(f'removed object from {layer} in room {room} with id {objdelete["id"]:04X}')
                # print(obj)
        for command in filter(
                lambda x: x['type'] == 'objnadd' and x.get('room', None) == room,
                stagepatches):
            layer = command.get('layer', None)
            name_to_add = command['objn']
            if layer is None:
                if 'OBJN' not in bzs:
                    bzs['OBJN'] = []
                objlist = bzs['OBJN']
            else:
                if 'OBJN' not in bzs['LAY '][f'l{layer}']:
                    bzs['LAY '][f'l{layer}']['OBJN'] = []
                objlist = bzs['LAY '][f'l{layer}']['OBJN']
            objlist.append(name_to_add)
        for objadd in filter(
                lambda x: x['type'] == 'objadd' and x.get('room', None) == room,
                stagepatches):
            layer = objadd.get('layer', None)
            # OBJ has a trailing whitespace, but that was too error-prone for the yaml, so just pad it here
            objtype = objadd['objtype'].ljust(4)
            obj = objadd['object']
            if objtype in ['SOBS', 'SOBJ', 'STAS', 'STAG', 'SNDT']:
                new_obj = DEFAULT_SOBJ.copy()
            elif objtype in ['OBJS', 'OBJ ', 'DOOR']:
                new_obj = DEFAULT_OBJ.copy()
            elif objtype == 'SCEN':
                new_obj = DEFAULT_SCEN.copy()
            else:
                print(f'Error: unknown objtype: {objtype}')
                continue
            if 'index' in obj:
                # check index, just to verify index based lists don't have a mistake in them
                if layer is None:
                    objlist = bzs.get(objtype, [])
                else:
                    objlist = bzs['LAY '][f'l{layer}'].get(objtype, [])
                if len(objlist) != obj['index']:
                    print(f'ERROR: wrong index adding object: {json.dumps(objadd)}')
                    continue
            for key, val in obj.items():
                new_obj[key] = val
            if 'id' in new_obj:
                new_obj['id'] = (new_obj['id'] & ~0x3FF) | next_id
                next_id += 1
            if layer is None:
                if objtype not in bzs:
                    bzs[objtype] = []
                objlist = bzs[objtype]
            else:
                if objtype not in bzs['LAY '][f'l{layer}']:
                    bzs['LAY '][f'l{layer}'][objtype] = []
                objlist = bzs['LAY '][f'l{layer}'][objtype]
            # add object name to objn if it's some kind of actor
            if objtype in ['SOBS', 'SOBJ', 'STAS', 'STAG', 'SNDT', 'OBJS', 'OBJ ', 'DOOR']:
                # TODO: this only works if the layer is set
                objn = bzs['LAY '][f'l{layer}']['OBJN']
                if obj['name'] not in objn:
                    objn.append(obj['name'])
            objlist.append(new_obj)
            modified = True
            # print(obj)
        # patch randomized items on stages
        for objname, layer, objid, itemid in rando_stagepatches.get((stage, room), []):
            modified = True
            try:
                RANDO_PATCH_FUNCS[objname](bzs['LAY '][f'l{layer}'], itemid, objid)
            except:
                print(f'ERROR: {stage}, {room}, {layer}, {objname}, {objid}')
        if stage == 'F001r' and room == 1:
            # put all storyflags in Link's room at the start
            if 'STAG' not in bzs['LAY ']['l0']:
                bzs['LAY ']['l0']['STAG'] = []
            for storyflag in patches['global'].get('startstoryflags', []):
                new_obj = OrderedDict(
                    params1=0xFFFFFFFF,
                    params2=0xFF5FFFFF,
                    posx=761,
                    posy=-22,
                    posz=-2260,
                    sizex=1000,
                    sizey=1000,
                    sizez=1000,
                    anglex=storyflag,
                    angley=0,
                    anglez=65535,
                    id=(0xFD84 & ~0x3FF) | next_id,
                    name="SwAreaT",
                )
                bzs['LAY ']['l0']['STAG'].append(new_obj)
                next_id += 1
            modified = True
        if modified:
            # print(json.dumps(bzs))
            return bzs
        else:
            return None

    patcher.set_bzs_patch(bzs_patch_func)

    text_labels = {}

    def flow_patch(msbf, filename):
        modified = False
        flowpatches = eventpatches.get(filename, [])
        flowpatches = list(filter(filter_option_requirement, flowpatches))
        # dictionary to map flow labels to ids for new flows
        label_to_index = OrderedDict()
        next_index = len(msbf['FLW3']['flow'])
        # first, fill in all the flow name to index mappings
        for command in filter(lambda x: x['type'] in ['flowadd', 'switchadd'], flowpatches):
            label_to_index[command['name']] = next_index
            next_index += 1
        for command in filter(lambda x: x['type'] == 'flowpatch', flowpatches):
            flowobj = msbf['FLW3']['flow'][command['index']]
            for key, val in command['flow'].items():
                # special case: next points to a label
                if key == 'next' and not isinstance(val, int):
                    index = label_to_index.get(val, None)
                    if index is None:
                        print(f'ERROR: label {val} not found in patch: {command["flow"]}')
                        continue
                    val = index
                # special case: text points to a label, textindex is param4
                if key == 'param4' and not isinstance(val, int):
                    index = text_labels.get(val, None)
                    if index is None:
                        print(f'ERROR: text label {val} not found in patch: {command["flow"]}')
                        continue
                    val = index
                flowobj[key] = val
            # print(f'patched flow {command["index"]}, {filename}')
            modified = True
        for command in filter(lambda x: x['type'] in ['flowadd', 'switchadd'], flowpatches):
            assert len(msbf['FLW3']['flow']) == label_to_index[command['name']], \
                f'index has to be the next value in the flow, expected {len(msbf["FLW3"]["flow"])} got {label_to_index[command["name"]]}'
            flowobj = OrderedDict(
                type='type1',
                subType=-1,
                param1=0,
                param2=0,
                next=-1,
                param3=0,
                param4=0,
                param5=0,
            )
            for key, val in command['flow'].items():
                # special case: next points to a label
                if key == 'next' and not isinstance(val, int):
                    index = label_to_index.get(val, None)
                    if index is None:
                        print(f'ERROR: label {val} not found in new flow: {command["flow"]}')
                        continue
                    val = index
                # special case: text points to a label, textindex is param4
                if key == 'param4' and not isinstance(val, int):
                    index = text_labels.get(val, None)
                    if index is None:
                        print(f'ERROR: text label {val} not found in new flow: {command["flow"]}')
                        continue
                    val = index
                flowobj[key] = val
            if command['type'] == 'flowadd':
                msbf['FLW3']['flow'].append(flowobj)
                # print(f'added flow {command["name"]}, {filename}')
            else:
                flowobj['type'] = 'switch'
                cases = command['cases']
                for i, _ in enumerate(cases):
                    value = cases[i]
                    if not isinstance(value, int):
                        index = label_to_index.get(value, None)
                        if index is None:
                            print(f'ERROR: label {value} not found in switch: {command}')
                            continue
                        cases[i] = index
                add_msbf_branch(msbf, flowobj, cases)
                # print(f'added switch {command["name"]}, {filename}')
            modified = True
        for command in filter(lambda x: x['type'] == 'entryadd', flowpatches):
            value = command['entry']['value']
            if not isinstance(value, int):
                index = label_to_index.get(value, None)
                if index is None:
                    print(f'ERROR: label {value} not found in new entry: {command["entry"]}')
                    continue
                value = index
            new_entry = OrderedDict(
                name=command['entry']['name'],
                value=value,
            )
            bucket = entrypoint_hash(command["entry"]["name"], len(msbf['FEN1']))
            msbf['FEN1'][bucket].append(new_entry)
            # print(f'added flow entry {command["entry"]["name"]}, {filename}')
            modified = True
        if filename == '003-ItemGet':
            # make progressive mitts
            make_progressive_item(msbf, 93, [35, 231], [56, 99], [904, 905])
            # make progressive swords
            # TODO trainings and goddess sword both set storyflags on their own, could reuse those
            make_progressive_item(msbf, 136, [77, 608, 75, 78, 74, 73],
                                  PROGRESSIVE_SWORD_ITEMIDS,
                                  PROGRESSIVE_SWORD_STORYFLAGS)
            # make progressive beetle
            make_progressive_item(msbf, 96, [38, 178], [53, 75], [912, 913])
            # make progressive pouch
            make_progressive_item(msbf, 258, [254, 253], [112, 113], [931, 932])
            # make progressive wallets
            make_progressive_item(msbf, 250, [246, 245, 244, 255],
                                  [108, 109, 110, 111], [915, 916, 917, 918])
            modified = True
        # patch randomized items
        for evntline, itemid in rando_eventpatches.get(filename, []):
            try:
                # can either be a label or a number
                evntline = int(evntline)
            except ValueError:
                index = label_to_index.get(evntline, None)
                if index is None:
                    print(f'ERROR: label {evntline} not found!')
                    continue
                evntline = index
                # print(f'dynamic label: {evntline}')
            modified = True
            msbf['FLW3']['flow'][evntline]['param2'] = itemid
            msbf['FLW3']['flow'][evntline]['param3'] = 9  # give item command
        if modified:
            return msbf
        else:
            return None

    def text_patch(msbt, filename):
        # for bucket, lbl_list in enumerate(msbt['LBL1']):
        #     for lbl in lbl_list:
        #         hash_b = entrypoint_hash(lbl['name'], len(msbt['LBL1']))
        #         print(f'smile: {bucket} {hash_b}')
        assert len(msbt['TXT2']) == len(msbt['ATR1'])
        modified = False
        textpatches = eventpatches.get(filename, [])
        textpatches = list(filter(filter_option_requirement, textpatches))
        for command in filter(lambda x: x['type'] == 'textpatch', textpatches):
            msbt['TXT2'][command['index']] = command['text'].encode('utf-16be')
            # print(f'patched text {command["index"]}, {filename}')
            modified = True
        for command in filter(lambda x: x['type'] == 'textadd', textpatches):
            index = len(msbt['TXT2'])
            text_labels[command['name']] = index
            msbt['TXT2'].append(command['text'].encode('utf-16be'))
            msbt['ATR1'].append({
                'unk1': command.get('unk1', 1),
                'unk2': command.get('unk2', 0)
            })
            # the game doesn't care about the name, but it has to exist and be unique
            # only unique within a file, but whatever
            entry_name = "%s:%d" % (filename[-3:], index)
            new_entry = OrderedDict(
                name=entry_name,
                value=index,
            )
            bucket = entrypoint_hash(entry_name, len(msbt['LBL1']))
            msbt['LBL1'][bucket].append(new_entry)
            # print(f'added text {index}, {filename}')
            modified = True
        if modified:
            return msbt
        else:
            return None

    patcher.set_event_patch(flow_patch)
    patcher.set_event_text_patch(text_patch)
    patcher.progress_callback = rando.progress_callback
    patcher.do_patch()

    rando.progress_callback('patching main.dol...')
    # patch main.dol
    orig_dol = bytearray((patcher.actual_extract_path / 'DATA' / 'sys' / 'main.dol').read_bytes())
    for dolpatch in filter(filter_option_requirement,
                           patches['global'].get('asm', {}).get('main', [])):
        actual_code = bytes.fromhex(dolpatch['original'])
        patched_code = bytes.fromhex(dolpatch['patched'])
        assert len(actual_code) == len(patched_code), "code length has to remain the same!"
        code_pos = orig_dol.find(actual_code)
        assert code_pos != -1, f"code {dolpatch['original']} not found in main.dol!"
        assert orig_dol.find(actual_code, code_pos + 1) == -1, \
            f"code {dolpatch['original']} found multiple times in main.dol!"
        orig_dol[code_pos:code_pos + len(actual_code)] = patched_code
    write_bytes_create_dirs(patcher.modified_extract_path / 'DATA' / 'sys' / 'main.dol', orig_dol)

    rando.progress_callback('patching rels...')
    rel_arc = U8File.parse_u8(
        BytesIO((patcher.actual_extract_path / 'DATA' / 'files' / 'rels.arc').read_bytes()))
    rel_modified = False
    for file, codepatches in patches['global'].get('asm', {}).items():
        if file == 'main':  # main.dol
            continue
        rel = rel_arc.get_file_data(f'rels/{file}NP.rel')
        if rel is None:
            print(f'ERROR: rel {file} not found!')
            continue
        rel = bytearray(rel)
        for codepatch in filter(filter_option_requirement, codepatches):
            actual_code = bytes.fromhex(codepatch['original'])
            patched_code = bytes.fromhex(codepatch['patched'])
            assert len(actual_code) == len(patched_code), "code length has to remain the same!"
            code_pos = rel.find(actual_code)
            assert code_pos != -1, f"code {codepatch['original']} not found in {file}!"
            if codepatch.get('multiple', False):
                while code_pos != -1:
                    rel[code_pos:code_pos + len(actual_code)] = patched_code
                    code_pos = rel.find(actual_code, code_pos + 1)
            else:
                assert rel.find(actual_code, code_pos + 1) == -1, \
                    f"code {codepatch['original']} found multiple times in {file}!"
                rel[code_pos:code_pos + len(actual_code)] = patched_code
        rel_arc.set_file_data(f'rels/{file}NP.rel', rel)
        rel_modified = True
    if rel_modified:
        rel_data = rel_arc.to_buffer()
        write_bytes_create_dirs(patcher.modified_extract_path / 'DATA' / 'files' / 'rels.arc', rel_data)

    rando.progress_callback('patching ObjectPack...')
    # patch object pack
    objpack_data = nlzss11.decompress(
        (patcher.actual_extract_path / 'DATA' / 'files' / 'Object' / 'ObjectPack.arc.LZ').read_bytes())
    object_arc = U8File.parse_u8(BytesIO(objpack_data))
    objpack_modified = False
    for oarc in patches['global'].get('objpackoarcadd', []):
        oarc_data = (patcher.oarc_cache_path / f'{oarc}.arc').read_bytes()
        object_arc.add_file_data(f'oarc/{oarc}.arc', oarc_data)
        objpack_modified = True
    if objpack_modified:
        objpack_data = object_arc.to_buffer()
        write_bytes_create_dirs(
            patcher.modified_extract_path / 'DATA' / 'files' / 'Object' / 'ObjectPack.arc.LZ',
            nlzss11.compress(objpack_data))

    # patch title screen logo
    actual_data = (rando.actual_extract_path / 'DATA' / 'files' / 'US' / 'Layout' / 'Title2D.arc').read_bytes()
    actual_arc = U8File.parse_u8(BytesIO(actual_data))
    logodata = (rando.rando_root_path / 'assets' / 'logo.tpl').read_bytes()
    actual_arc.set_file_data('timg/tr_wiiKing2Logo_00.tpl', logodata)
    (rando.modified_extract_path / 'DATA' / 'files' / 'US' / 'Layout' / 'Title2D.arc').write_bytes(actual_arc.to_buffer())
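# The required-dungeons billboard text in do_gamepatches is wrapped greedily at
# spaces so no line exceeds 27 characters. The same logic, pulled out into a
# hypothetical standalone helper for clarity:
def wrap_at_spaces(text: str, limit: int = 27) -> str:
    cur_line = ''
    combined = ''
    for part in text.split(' '):
        if len(cur_line + part) > limit:
            combined += cur_line + '\n'
            cur_line = part + ' '
        else:
            cur_line += part + ' '
    combined += cur_line
    return combined.strip()

# usage sketch:
# wrap_at_spaces('Required: Skyview, Sandship, Fire Sanctuary')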
def test_compress(file):
    assert TEST_FILE_DATA_UNCOMP[file] == decompress_bytes(
        bytes(nlzss11.compress(TEST_FILE_DATA_UNCOMP[file])))
def test_roundtrip(file):
    assert TEST_FILE_DATA_UNCOMP[file] == nlzss11.decompress(
        nlzss11.compress(TEST_FILE_DATA_UNCOMP[file]))