def parseFile(filename):
    """Extract translatable text chunks from a single JSON asset file.

    Reads *filename* through open_n_decode/prepare, walks every field path
    of the parsed JSON, and feeds string values that match a rule in
    files_of_interest (or any field of a "dialog" file) to the registered
    textHandlers.  Returns the accumulated list of chunks; returns [] when
    the file cannot be parsed as JSON.
    """
    chunk = list()
    with open_n_decode(filename, "r", "utf-8") as f:
        string = prepare(f)
    try:
        jsondata = loads(string)
    except Exception:  # was a bare except; unparsable files are skipped by design
        print("Cannot parse " + filename)
        return []
    paths = list_field_paths(jsondata)
    # Every field of a file living under a ".../dialog" directory is
    # treated as translatable regardless of the path rules.
    dialog = dirname(filename).endswith("dialog")
    for path in paths:
        for k in files_of_interest.keys():
            if filename.endswith(k) or k == "*":
                for roi in files_of_interest[k]:
                    if roi.match(path) or dialog:
                        val = field_by_path(jsondata, path)
                        if not isinstance(val, str):  # was: not type(val) is str
                            print("File: " + filename)
                            print("Type of " + path + " is not a string!")
                            continue
                        if val == "":
                            continue
                        # First handler that accepts the value wins.
                        for handler in textHandlers:
                            res = handler(val, filename, '/' + path)
                            if res:
                                chunk += res
                                break
                        break
    return chunk
def sunlesssea_event_teaser(the_dir):
    """Derive Chinese "Teaser" texts for Sunless Sea events from their
    translated "Description" fields and write the result back to *the_dir*.

    Pass 1 drops Chs entries identical to Eng; pass 2 maps each
    .../Description path to its Chs text under the matching .../Teaser path;
    pass 3 truncates that text at the first '!', '?' or '.' (mirroring the
    punctuation of the English teaser) and stores it as the Chs teaser.
    """
    print("events.json.json is loading")
    # NOTE(review): open() is called as open(path, mode, encoding) — this only
    # works if 'open' is codecs.open (or similar) in this module; confirm.
    src = open(the_dir, "rb+", "utf-8")
    events_text = json.loads(prepare(src))
    src.close()  # fix: read handle was previously leaked
    path_dict = dict()
    print("Loading complete")
    for i in events_text:
        if "Chs" in i["Texts"]:
            if i["Texts"]["Eng"] == i["Texts"]["Chs"]:
                del i["Texts"]["Chs"]
    for i in events_text:
        if "Chs" in i["Texts"]:
            w = i["Files"]
            for v in w["entities/events.json"]:
                if re.search(r".*/Description$", v):
                    path_dict[v.replace("/Description", "/Teaser")] = i["Texts"]["Chs"]
    for i in events_text:
        w = i["Files"]
        for v in w["entities/events.json"]:
            if re.search(r'.*Teaser$', v):
                if v in path_dict:
                    if re.search(r'! ', i["Texts"]["Eng"]):
                        Chs_text = path_dict[v].split('!')
                        i["Texts"]["Chs"] = Chs_text[0] + "! - "
                    elif re.search(r'\?', i["Texts"]["Eng"]):
                        Chs_text = path_dict[v].split('?')
                        i["Texts"]["Chs"] = Chs_text[0] + "?"
                    else:
                        Chs_text = path_dict[v].split('.')
                        i["Texts"]["Chs"] = Chs_text[0] + "..."
    f = open(the_dir, "wb+", "utf-8")
    f.write(
        json.dumps(events_text, ensure_ascii=False, sort_keys=True, indent=2))
    f.close()  # fix: was 'f.close' — attribute access, the file was never closed
def parseFile(filename):
    """Extract translatable text chunks from a JSON asset file or a
    JSON-patch (*.patch) file.

    For a .patch file the path/value pairs come from trans_patch()/to_a_list;
    otherwise the file is parsed with prepare()/loads() and walked via
    list_field_paths().  Matching string values are passed to textHandlers.
    Returns the accumulated chunk list ([] on parse failure; an empty list
    for files in ignore_filelist).
    """
    chunk = list()
    if basename(filename) in ignore_filelist:
        return chunk
    print(basename(filename))
    with open_n_decode(filename, "r", "utf_8_sig") as f:
        try:
            if basename(filename).endswith('.patch'):
                # Marker so downstream consumers know this chunk came
                # from a patch file.
                chunk.append("patch")
                if basename(filename) in patch_serialization:  # was: dict.keys(patch_serialization)
                    string = trans_patch(
                        f, patch_serialization[basename(filename)])
                else:
                    string = trans_patch(f)
                paths = to_a_list(string, 0)
            else:
                string = prepare(f)
                jsondata = loads(string)
                paths = list_field_paths(jsondata)
        except Exception:  # was a bare except; unparsable files are skipped
            print("Cannot parse " + filename)
            try:
                # Best-effort append to the shared error list; failures
                # here are deliberately ignored.
                with open(error_list_file, 'a') as problem_file:
                    problem_file.writelines(
                        filename.replace(root_dir, '') + '\n')
            except Exception:
                pass
            return []
    filename_base = filename
    # Rule matching uses the unpatched filename.
    if basename(filename).endswith('.patch'):
        filename = filename.replace('.patch', "")
    # Every field of a ".../dialog" file is treated as translatable.
    dialog = dirname(filename).endswith("dialog")
    for i, path in enumerate(paths):
        for k in files_of_interest.keys():
            if filename.endswith(k) or k == "*":
                for roi in files_of_interest[k]:
                    if roi.match(path) or dialog:
                        if basename(filename_base).endswith('.patch'):
                            val = to_a_list(string, 1)[i]
                        else:
                            val = field_by_path(jsondata, path)
                        if not isinstance(val, str):  # was: not type(val) is str
                            print("File: " + filename)
                            print("Type of " + path + " is not a string!")
                            continue
                        if val == "":
                            continue
                        # First handler that accepts the value wins.
                        for handler in textHandlers:
                            res = handler(val, filename, '/' + path)
                            if res:
                                chunk += res
                                break
                        break
    return chunk
def trans_patch(jsons):
    """Flatten a JSON-patch file into a tuple of (path, value) pairs.

    Only 'add' and 'replace' operations are kept.  A scalar patch value is
    emitted under the operation's own path; an object/array value is
    expanded into one pair per leaf field via list_field_paths().  The
    leading '/' of each JSON-pointer path is stripped.
    """
    string = prepare(jsons)
    json_text = json.loads(string)
    # (op, path, value) per patch entry; 'value' is absent for operations
    # such as 'remove'/'test', so default it to ''.
    entries = [(e['op'], e['path'], e.get('value', '')) for e in json_text]
    paths_out = list()
    values_out = list()
    for op, patch_path, patch_value in entries:
        if op not in ('add', 'replace'):
            continue
        sub_paths = list_field_paths(patch_value)
        if not sub_paths:
            # Scalar value: the patch path itself addresses the field.
            paths_out.append(patch_path.replace('/', '', 1))
            values_out.append(patch_value)
        else:
            # Composite value: emit one pair per leaf field inside it.
            for sub in sub_paths:
                paths_out.append((patch_path + '/' + sub).replace('/', '', 1))
                values_out.append(field_by_path(patch_value, sub))
    return tuple(zip(paths_out, values_out))
w = v["RareDefaultEvent"] if "Teaser" in w and w["Teaser"] is not None: cache_1 = list() cache_1.append(w["Id"]) cache_1.append(w["Teaser"]) ##print (w["HumanName"]) # dict_1.append(zip(w["HumanName"],w["Id"])) dict_1.append(cache_1) """ return (dict_1) if __name__ == "__main__": target = json.loads( prepare( open( "F:/Sunless_Sea_Chinese_Translation_Mod_Re/translations/texts/geography/Tiles.json.json", "r", "utf-8-sig"))) old = json.loads( prepare( open("F:/Sunless_Sea_Data/cn_translation/geography/Tiles.json", "r", "utf-8-sig"))) new = json.loads( prepare( open( "F:/Sunless_Sea_Data/Sunless Sea_source_file/geography/Tiles.json", "r", "utf-8-sig"))) old_data = step_1(old, "HumanName") new_data = step_1(new, "HumanName") dict_2 = dict() for i in old_data: for v in new_data:
def trans_patch_spcial_1(jsons, ex):
    """Flatten a JSON-patch file into (path, value) pairs, first rewriting
    append-style paths according to the rules in *ex*.

    Each rule in *ex* is applied through replace_the_path() to turn '/-'
    (append) pointers into explicit indices.  After that, behaves like
    trans_patch(): only 'add'/'replace' operations are kept, composite
    values are expanded per leaf field, and the leading '/' is stripped.
    """
    string = prepare(jsons)
    json_text = json.loads(string)
    # (op, path, value) per patch entry; 'value' is absent for operations
    # such as 'remove'/'test', so default it to ''.
    op_list = list()
    path_list = list()
    value_list = list()
    for entry in json_text:
        op_list.append(entry['op'])
        path_list.append(entry['path'])
        value_list.append(entry.get('value', ''))
    # Rewrite '/-' append pointers into explicit indices, one rule at a time.
    # (The previous inline implementation of this loop was dead, commented-out
    # code duplicating replace_the_path(); it has been removed.)
    for rule in ex:
        path_list = replace_the_path(path_list, rule)
    paths_out = list()
    values_out = list()
    for op, patch_path, patch_value in zip(op_list, path_list, value_list):
        if op not in ('add', 'replace'):
            continue
        sub_paths = list_field_paths(patch_value)
        if not sub_paths:
            # Scalar value: the patch path itself addresses the field.
            paths_out.append(patch_path.replace('/', '', 1))
            values_out.append(patch_value)
        else:
            # Composite value: emit one pair per leaf field inside it.
            for sub in sub_paths:
                paths_out.append((patch_path + '/' + sub).replace('/', '', 1))
                values_out.append(field_by_path(patch_value, sub))
    return tuple(zip(paths_out, values_out))
if not re.search(rule[0]+'/'+'-', text) == None: wait = text.replace(rule[0]+'/'+'-', rule[0]+'/'+str(o)) path_list_3.append(wait) o = o+1 else: path_list_3.append(text) else: if not re.search(rule[0]+'/'+'-', text) == None: wait = text.replace(rule[0]+'/-', rule[0]+'/'+str(rule[1])) path_list_3.append(wait) else: path_list_3.append(text) return path_list_3 """ if __name__ == "__main__": jsons3 = open_n_decode( 'F:/FrackinUniverse-sChinese-Project/translations/others/dialog/converse.config.patch', "r", "utf_8_sig") list233 = [('generic', 70, 1),('cheerful', 31, 1),('jerk', 31, 1),('flirty', 31, 1),('anxious', 31, 1),('easilyspooked',32,1),('clumsy',31,1),('excited',31,1),('intrusive',31,1),('dumb',32,1),('emo',30,1),('fast',31,1),('nocturnal',32,1),('socialite',31,1),('ambitious',30,1)] test = trans_patch(jsons3) dict_old = dict() for i in range(len(test)): dict_old['/'+test[i][0]] = test[i][1] print(dict_old) """ if __name__ == "__main__": json_file_3 = open_n_decode( 'F:/sunlesssee/Sunless Sea_bak/entities/events.json', "r", "utf_8_sig") print(json.loads(prepare(json_file_3)))
def sunlesssea_text_loader(un_dir, text_dir, target_dir):
    """Apply Chinese translations to the Sunless Sea game files.

    Loads the ten patchable JSON files from *un_dir*, walks every
    translation file under *text_dir* applying each entry's "Chs" text via
    add_value() at the paths listed in its "Files" map, then writes the
    patched JSON to the same relative paths under *target_dir*.
    """
    print("Sunless Sea text is loading")
    # Relative paths of every game file this loader patches; this replaces
    # ten copy-pasted load/patch/write sequences with one data-driven loop.
    rel_paths = [
        "encyclopaedia/Associations.json",
        "encyclopaedia/CombatAttacks.json",
        "encyclopaedia/CombatItems.json",
        "encyclopaedia/SpawnedEntities.json",
        "encyclopaedia/Tutorials.json",
        "entities/areas.json",
        "entities/events.json",
        "entities/exchanges.json",
        "entities/qualities.json",
        "geography/Tiles.json",
    ]
    # NOTE(review): open() is called as open(path, mode, encoding) — this only
    # works if 'open' is codecs.open (or similar) in this module; confirm.
    texts = dict()
    for rel in rel_paths:
        src = open(un_dir + '/' + rel, "rb+", "utf-8")
        texts[rel] = json.loads(prepare(src))
        src.close()  # fix: read handles were previously leaked
    print("Loading complete")
    for path, d, filelist in os.walk(text_dir):
        for filename in filelist:
            w = os.path.join(path, filename).replace('//', '/')
            src = open(w, "rb+", "utf-8")
            json_data = json.loads(prepare(src))
            src.close()
            for i in json_data:
                if "Chs" in i["Texts"]:
                    for p in i["Files"].keys():
                        # Only the known game files are patched; any other
                        # key in "Files" is ignored (as before).
                        if p in texts:
                            for v in i["Files"][p]:
                                texts[p] = add_value(
                                    texts[p],
                                    v.replace('/', '', 1).split('/'),
                                    i["Texts"]["Chs"])
    print("Now,start to import text")
    for rel in rel_paths:
        out = open(target_dir + '/' + rel, "wb+", "utf-8")
        out.write(
            json.dumps(texts[rel], ensure_ascii=False, sort_keys=True,
                       indent=2))
        # fix: every close was written as 'out.close' (attribute access),
        # so none of the ten output files was ever explicitly closed.
        out.close()
    print("Import complete")