def get_translations_keys_values(string_files):
    """Populate the module-level `languages` and `translations` tables from
    a list of strings-file descriptors.

    Args:
        string_files: list of dicts with 'lang_code' and 'file_path' keys;
            one entry is expected to have lang_code == 'Base' (the master).

    Returns:
        None. Results accumulate in the module-level `languages` and
        `translations` lists and via `find_and_add`; returns early when no
        'Base' entry exists.
    """
    # next(..., None) instead of [...][0]: indexing an empty match list
    # raised IndexError, which made the no-'Base' fallthrough unreachable.
    base_obj = next(
        (item for item in string_files if item.get('lang_code') == 'Base'),
        None)
    if not base_obj:
        return None

    base_strings = localizable.parse_strings(
        filename=base_obj.get('file_path'))
    languages.append('Key')
    languages.append('English')
    for base_string in base_strings:
        # One row per entry: [key, English value], both UTF-8 encoded.
        translations.append([
            base_string['key'].encode("utf-8"),
            base_string['value'].encode("utf-8"),
        ])

    lang_index = 2  # columns 0/1 are Key/English
    for translation in string_files:
        if translation['lang_code'] != 'Base':
            languages.append(translation['lang_code'])
            translation_strings = localizable.parse_strings(
                filename=translation.get('file_path'))
            for es_mx_string in translation_strings:
                key = es_mx_string['key'].encode("utf-8")
                value = es_mx_string['value'].encode("utf-8")
                find_and_add(key, value, lang_index)
            lang_index += 1
def merge_applestrings_translations(lang, fname, fresh):
    """
    Often using an old translation is better than reverting to the English
    when a translation is incomplete. So we'll merge old translations into
    fresh ones.

    Args:
        lang: language code of the translation being merged.
        fname: basename of the .strings file.
        fresh: raw contents of the freshly pulled translation.

    Returns:
        The merged .strings contents as a string, or `fresh` unchanged when
        the existing translation file cannot be opened.
    """
    fresh_translation = localizable.parse_strings(content=fresh)
    english_translation = localizable.parse_strings(
        filename='./PsiphonClientCommonLibrary/Resources/Strings/en.lproj/%s' % fname)

    try:
        existing_fname = './PsiphonClientCommonLibrary/Resources/Strings/%s.lproj/%s' % (
            lang, fname)
        existing_translation = localizable.parse_strings(
            filename=existing_fname)
    except Exception as ex:
        # Best-effort: with no previous translation there is nothing to merge.
        print(
            'merge_applestrings_translations: failed to open existing translation: %s -- %s\n'
            % (existing_fname, ex))
        return fresh

    fresh_merged = ''

    for entry in fresh_translation:
        # next(..., default) replaces the original bare `except:` fallbacks,
        # which also swallowed KeyboardInterrupt/SystemExit and real bugs.
        english = next(
            (x['value'] for x in english_translation
             if x['key'] == entry['key']),
            None)

        existing = next(
            (x for x in existing_translation if x['key'] == entry['key']),
            None)
        if existing is not None:
            # Make sure we don't fall back on an untranslated value. See comment
            # on function `flag_untranslated_*` for details.
            if UNTRANSLATED_FLAG in existing['comment']:
                existing = None
            else:
                existing = existing['value']

        fresh_value = entry['value']
        if fresh_value == english and existing is not None and existing != english:
            # The fresh translation has the English fallback; prefer the
            # previous genuine translation.
            fresh_value = existing

        escaped_fresh = fresh_value.replace('"', '\\"').replace('\n', '\\n')

        fresh_merged += '/*%s*/\n"%s" = "%s";\n\n' % (
            entry['comment'], entry['key'], escaped_fresh)

    return fresh_merged
def merge_applestrings_translations(master_fpath, lang, trans_fpath, fresh_raw):
    """Merge Xcode `.strings` files. Can be passed as a mutator to
    `process_resource`.

    Args:
        master_fpath: path to the English master .strings file.
        lang: language code of the translation being merged.
        trans_fpath: path to the previously committed translation.
        fresh_raw: raw contents of the freshly pulled translation.

    Returns:
        The merged .strings contents as a string, or the (flagged) fresh
        contents unchanged when the existing translation cannot be opened.
    """
    # First flag all the untranslated entries, for later reference.
    fresh_raw = _flag_untranslated_applestrings(master_fpath, lang,
                                                trans_fpath, fresh_raw)

    fresh_translation = localizable.parse_strings(content=fresh_raw)
    english_translation = localizable.parse_strings(filename=master_fpath)

    try:
        existing_translation = localizable.parse_strings(filename=trans_fpath)
    except Exception as ex:
        # Best-effort: with no previous translation there is nothing to merge.
        print(
            f'merge_applestrings_translations: failed to open existing translation: {trans_fpath} -- {ex}\n'
        )
        return fresh_raw

    fresh_merged = ''

    for entry in fresh_translation:
        # next(..., default) replaces the original bare `except:` fallbacks,
        # which also swallowed KeyboardInterrupt/SystemExit and real bugs.
        english = next(
            (x['value'] for x in english_translation
             if x['key'] == entry['key']),
            None)

        existing = next(
            (x for x in existing_translation if x['key'] == entry['key']),
            None)
        if existing is not None:
            # Make sure we don't fall back on an untranslated value. See comment
            # on function `flag_untranslated_*` for details.
            if UNTRANSLATED_FLAG in existing['comment']:
                existing = None
            else:
                existing = existing['value']

        fresh_value = entry['value']
        if fresh_value == english and existing is not None and existing != english:
            # The fresh translation has the English fallback; prefer the
            # previous genuine translation.
            fresh_value = existing

        escaped_fresh = fresh_value.replace('"', '\\"').replace('\n', '\\n')

        fresh_merged += f'/*{entry["comment"]}*/\n"{entry["key"]}" = "{escaped_fresh}";\n\n'

    return fresh_merged
def flag_untranslated_applestrings(_, fname, fresh):
    """
    When retrieved from Transifex, Apple .strings files include all string
    table entries, with the English provided for untranslated strings. This
    counteracts our efforts to fall back to previous translations when
    strings change. Like so:
    - Let's say the entry `"CANCEL_ACTION" = "Cancel";` is untranslated for
      French. It will be in the French strings file as the English.
    - Later we change "Cancel" to "Stop" in the English, but don't change
      the key.
    - On the next transifex_pull, this script will detect that the string is
      untranslated and will look at the previous French "translation" --
      which is the previous English. It will see that that string differs
      and get fooled into thinking that it's a valid previous translation.
    - The French UI will keep showing "Cancel" instead of "Stop".

    While pulling translations, we are going to flag incoming non-translated
    strings, so that we can check later and not use them as a previous
    translation. We'll do this "flagging" by putting the string
    "[UNTRANSLATED]" into the string comment.

    (An alternative approach that would also work: Remove any untranslated
    string table entries. But this seems more drastic than modifying a
    comment and could have unforeseen side-effects.)
    """
    fresh_translation = localizable.parse_strings(content=fresh)
    english_translation = localizable.parse_strings(
        filename='./PsiphonClientCommonLibrary/Resources/Strings/en.lproj/%s' % fname)

    fresh_flagged = ''

    for entry in fresh_translation:
        # next(..., None) replaces a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit and real bugs).
        english = next(
            (x['value'] for x in english_translation
             if x['key'] == entry['key']),
            None)

        if entry['value'] == english:
            # The string is untranslated, so flag the comment.
            entry['comment'] = UNTRANSLATED_FLAG + entry['comment']

        entry['value'] = entry['value'].replace('"', '\\"').replace('\n', '\\n')

        fresh_flagged += '/*%s*/\n"%s" = "%s";\n\n' % (
            entry['comment'], entry['key'], entry['value'])

    return fresh_flagged
def merge_applestrings_translations(lang, fname, fresh):
    """
    Often using an old translation is better than reverting to the English
    when a translation is incomplete. So we'll merge old translations into
    fresh ones.

    Args:
        lang: language code of the translation being merged.
        fname: basename of the .strings file.
        fresh: raw contents of the freshly pulled translation.

    Returns:
        The merged .strings contents as a string, or `fresh` unchanged when
        the existing translation file cannot be opened.
    """
    fresh_translation = localizable.parse_strings(content=fresh)
    english_translation = localizable.parse_strings(
        filename='./Endless/en.lproj/%s' % fname)

    try:
        existing_fname = './Endless/%s.lproj/%s' % (lang, fname)
        existing_translation = localizable.parse_strings(
            filename=existing_fname)
    except Exception as ex:
        # Best-effort: with no previous translation there is nothing to merge.
        print(
            'merge_applestrings_translations: failed to open existing translation: %s -- %s\n'
            % (existing_fname, ex))
        return fresh

    fresh_merged = ''

    for entry in fresh_translation:
        # next(..., default) replaces the original bare `except:` fallbacks,
        # which also swallowed KeyboardInterrupt/SystemExit and real bugs.
        english = next(
            (x['value'] for x in english_translation
             if x['key'] == entry['key']),
            None)
        existing = next(
            (x['value'] for x in existing_translation
             if x['key'] == entry['key']),
            None)

        # Use a dedicated local instead of re-binding the `fresh` parameter:
        # the original clobbered the raw input mid-loop.
        fresh_value = entry['value']
        if fresh_value == english and existing is not None and existing != english:
            # The fresh translation has the English fallback; prefer the
            # previous genuine translation.
            fresh_value = existing

        escaped_fresh = fresh_value.replace('"', '\\"').replace('\n', '\\n')

        fresh_merged += '/*%s*/\n"%s" = "%s";\n\n' % (
            entry['comment'], entry['key'], escaped_fresh)

    return fresh_merged
def parse_localizations():
    """Scan LOCALIZATIONS_DIR for *.lproj folders and load their strings.

    Side effects: appends language codes to the module-level `localizations`
    list and fills `appstore_titles` / `appstore_descriptions` (the first
    string of each file is the title; the rest are joined into the
    description, with GITHUB_PROJECT_URL appended to any line that
    mentions 'github').

    Returns:
        dict mapping language code -> parsed strings list. (The original
        built this dict but silently dropped it; returning it is
        backward-compatible for callers that ignore the result.)
    """
    stringsets = {}
    contents = os.listdir(LOCALIZATIONS_DIR)
    for directory_name in contents:
        # endswith() instead of `in`: a folder merely *containing* the
        # extension mid-name should not be treated as a localization.
        if directory_name.endswith(LPROJ_EXTENSION):
            language_name = directory_name.split('.')[0]
            localizations.append(language_name)
    for localization in localizations:
        directory_name = localization + LPROJ_EXTENSION
        path = os.path.join(LOCALIZATIONS_DIR, directory_name,
                            STRINGS_FILENAME)
        strings = localizable.parse_strings(filename=path)
        stringsets[localization] = strings
        description = u""
        for i, string in enumerate(strings):
            value = unicode(string['value'])
            if i == 0:
                # First entry is the App Store title.
                appstore_titles[localization] = value
            else:
                if u'github' in value.lower():
                    value = value + u' ' + GITHUB_PROJECT_URL
                if i == 1:
                    description = value
                else:
                    description = description + u'\n' + value
        appstore_descriptions[localization] = description
    return stringsets
def mergeLocalization(srcFilePath, destFilePath, cleanMode=False, ignoreFilePath=None):
    """Merge translations from src into dest and rewrite dest in place.

    - Keys new in src are copied over; for existing keys the dest value is
      kept but the comment is refreshed from src.
    - cleanMode: drop dest keys that no longer exist in src.
    - ignoreFilePath: keys whose value there is 'DO NOT TRANSLATE' are
      removed from dest.

    Returns:
        The number of entries written.

    NOTE(review): this treats parse_strings() results as dicts keyed by
    string key; the upstream `localizable` package returns a list of entry
    dicts, so this presumably relies on a local fork -- confirm.
    """
    srcTranslations = localizable.parse_strings(filename=srcFilePath)
    destTranslations = localizable.parse_strings(filename=destFilePath)

    for key, value in srcTranslations.items():
        if not key in destTranslations:
            destTranslations[key] = value
        else:
            # Keep the existing translated value, refresh the comment.
            destTranslations[key] = {
                'comment': value['comment'],
                'value': destTranslations[key]['value']
            }

    if cleanMode:
        # Iterate over a snapshot: deleting from a dict while iterating its
        # live items() view raises RuntimeError on Python 3.
        for key, value in list(destTranslations.items()):
            if not key in srcTranslations:
                del destTranslations[key]

    if ignoreFilePath:
        ignoreTranslations = localizable.parse_strings(filename=ignoreFilePath)
        for key, value in ignoreTranslations.items():
            if value['value'] == 'DO NOT TRANSLATE':
                if key in destTranslations:
                    del destTranslations[key]

    # Normalize keys/values for VSLP; first occurrence of a normalized key wins.
    resultDict = {}
    for key, value in destTranslations.items():
        prepKey = prepareKeyForVslp(key)
        prepValue = prepareValueForVslp(value['value'])
        if not prepKey in resultDict:
            resultDict[prepKey] = {
                'comment': value['comment'],
                'value': prepValue
            }

    contents = ''
    for key, value in resultDict.items():
        contents += LOCALIZATION_GROUP_COMMENT_FORMAT.format(value['comment'])
        contents += LOCALIZATION_FORMAT.format(key, value['value'])

    with open(destFilePath, "w") as f:
        f.write(contents.encode("utf-8"))
    return len(resultDict)
def prepareLocalizedStrings(srcFilePath):
    """Rewrite *srcFilePath* in place using the canonical comment + entry
    layout (LOCALIZATION_GROUP_COMMENT_FORMAT / LOCALIZATION_FORMAT).
    """
    translations = localizable.parse_strings(filename=srcFilePath)
    pieces = []
    for entryKey, entry in translations.items():
        pieces.append(LOCALIZATION_GROUP_COMMENT_FORMAT.format(entry['comment']))
        pieces.append(LOCALIZATION_FORMAT.format(entryKey, entry['value']))
    with open(srcFilePath, "w") as f:
        f.write(''.join(pieces).encode("utf-8"))
def load_source(filename):
    """Load a translation source file by extension.

    Args:
        filename: path ending in '.po' (parsed with polib) or '.strings'
            (parsed with localizable).

    Returns:
        (entries, mapping) -- the parsed entries and their key map.

    Raises:
        ValueError: if the extension is neither .po nor .strings.
    """
    if filename.endswith('.po'):
        entries = polib.pofile(filename)
        mapping = create_po_map(entries)
    elif filename.endswith('.strings'):
        entries = localizable.parse_strings(filename=filename)
        mapping = create_strings_map(entries)
    else:
        # `assert` is stripped under `python -O`, which would let execution
        # fall through to the return and raise a confusing NameError;
        # raise an explicit, descriptive error instead.
        raise ValueError('unsupported translation file type: %s' % filename)
    return entries, mapping
def load_source(filename):
    """Parse *filename* as .po (polib) or .strings (localizable) and return
    (parsed_entries, key_map). Any other extension trips the assertion.
    """
    is_po = filename.endswith('.po')
    is_strings = filename.endswith('.strings')
    assert is_po or is_strings, filename
    if is_po:
        parsed = polib.pofile(filename)
        return parsed, create_po_map(parsed)
    parsed = localizable.parse_strings(filename=filename)
    return parsed, create_strings_map(parsed)
def gen(self):
    """Merge base-language entries that are missing from the destination
    language and write the merged table to '<filename>.txt' inside the
    destination .lproj folder (Python 2 script).
    """
    basePath = os.path.join(self.path, self.base + '.lproj', self.filename)
    destPath = os.path.join(self.path, self.dest + '.lproj', self.filename)
    # Output goes alongside the destination .strings file, '.txt' suffixed,
    # so the original file is never overwritten.
    outPath = os.path.join(self.path, self.dest + '.lproj', self.filename + '.txt')
    print 'Loading strings files ...'
    print basePath
    baseLang = localizable.parse_strings(filename = basePath)
    print destPath
    destLang = localizable.parse_strings(filename = destPath)
    print 'Done'
    print 'Merging ...',
    # Merge entries newly added in base into destLang.
    merged = self._mergeTo(baseLang, destLang)
    print 'Done'
    self._generate_strings_file(outPath, merged)
    pass
def prepareForVslp(srcFilePath):
    """Normalize every key/value for VSLP and rewrite *srcFilePath* in
    place; when two keys normalize identically, the first one wins.
    """
    translations = localizable.parse_strings(filename=srcFilePath)
    normalized = {}
    for rawKey, entry in translations.items():
        vslpKey = prepareKeyForVslp(rawKey)
        vslpValue = prepareValueForVslp(entry['value'])
        if vslpKey not in normalized:
            normalized[vslpKey] = {
                'comment': entry['comment'],
                'value': vslpValue
            }
    pieces = []
    for vslpKey, entry in normalized.items():
        pieces.append(LOCALIZATION_GROUP_COMMENT_FORMAT.format(entry['comment']))
        pieces.append(LOCALIZATION_FORMAT.format(vslpKey, entry['value']))
    with open(srcFilePath, "w") as f:
        f.write(''.join(pieces).encode("utf-8"))
# NOTE(review): this chunk begins mid-function -- the lines down to
# `return result` are the tail of a translation-applying loop whose
# definition lies outside this view; the indentation is reconstructed
# and should be confirmed against the original file.
            break
        item = l.copy()
        if trans:
            # Wrap the translated value with the surrounding prefix/suffix.
            item['value'] = prefix + trans + suffix
        result.append(item)
    return result


def load_source(filename):
    """Load a translation source: .po via polib, .strings via localizable.

    Returns (parsed_entries, key_map). Asserts on any other extension.
    """
    if filename.endswith('.po'):
        l = polib.pofile(filename)
        m = create_po_map(l)
    elif filename.endswith('.strings'):
        l = localizable.parse_strings(filename=filename)
        m = create_strings_map(l)
    else:
        assert False, filename
    return l, m


if __name__ == '__main__':
    # Usage: script <strings-to-update> <source-1> <source-2>
    f1 = sys.argv[1]
    f2 = sys.argv[2]
    f3 = sys.argv[3]
    s = localizable.parse_strings(filename=f1)
    l1, m1 = load_source(f2)
    l2, m2 = load_source(f3)
    # `apply` is a project-local function here (it shadows the Py2 builtin).
    r = apply(s, m1, m2)
    print(localizable.write_strings(r).encode('utf-8'))
# NOTE(review): setdefaultencoding only exists after a Python 2
# `reload(sys)` -- presumably done earlier in the original file; confirm.
sys.setdefaultencoding('utf8')

import time
import localizable
from googletrans import Translator

# Expect exactly one argument: the source .strings file.
if len(sys.argv) != 2:
    print "usage: autolocalize.py File.strings"
    exit(1)

srcLang = "en"
srcFile = sys.argv[1]
srcFilename = os.path.basename(srcFile)
# Target languages to machine-translate into.
languages = ["zh-CN", "zh-TW", "ja", "ko", "de", "fr", "it", "es", "pt", "ms"]

strings = localizable.parse_strings(filename=srcFile)
translator = Translator()


def header(lang):
    # Banner comment placed at the top of each generated .strings file.
    return "/*\n\
{}\n\
Generated with autolocalize - https://github.com/mattlawer/autolocalize\n\
\n\
Translated from {} to {} on {}.\n\
*/\n\n\n".format(srcFilename, srcLang, lang, time.strftime("%x"))


# Google Translate uses zh-CN/zh-TW while Xcode folders use zh-Hans/zh-Hant.
# NOTE(review): the loop body continues beyond this chunk.
for l in languages:
    lproj = "{}.lproj".format(
        l.replace("zh-CN", "zh-Hans").replace("zh-TW", "zh-Hant"))
def fillSheetIOS(string_path, ws):
    """Fill worksheet *ws* with the strings table at *string_path* plus its
    sibling Vietnamese/Japanese translations (columns: A=key, B=comment,
    C=English, D=Vietnamese, E=Japanese). Missing Vietnamese entries are
    marked with a red fill and turn the sheet tab red.

    NOTE(review): indentation below is reconstructed from a collapsed
    source -- in particular, whether the per-row writes are guarded by the
    `isinstance(str_key, unicode)` check should be confirmed.
    """
    #check relative file
    base_folder = os.path.dirname(string_path)
    base_folder = os.path.join(base_folder, os.pardir)
    file_name = os.path.basename(string_path)
    # Vietnamese
    vi_path = os.path.join(base_folder, "vi.lproj")
    vi_path = os.path.join(vi_path, file_name)
    vi_strings = None
    if os.path.exists(vi_path):
        vi_strings = localizable.parse_strings(filename=vi_path)
    # Japanese
    ja_path = os.path.join(base_folder, "ja.lproj")
    ja_path = os.path.join(ja_path, file_name)
    ja_strings = None
    if os.path.exists(ja_path):
        ja_strings = localizable.parse_strings(filename=ja_path)
    # Fill table data
    strings = localizable.parse_strings(filename=string_path)
    row = 1  # row 1 is the header; data starts at row 2
    thin_border = Border(left=Side(style='thin'),
                         right=Side(style='thin'),
                         top=Side(style='thin'),
                         bottom=Side(style='thin'))
    haveError = False
    for item in strings:
        row += 1
        id_cell = "A" + str(row)
        description_cell = "B" + str(row)
        vi_cell = "D" + str(row)
        en_cell = "C" + str(row)
        ja_cell = "E" + str(row)
        str_key = item['key']
        str_comment = item['comment']
        # Only unicode keys are written (Python 2 `unicode` check).
        if isinstance(str_key, unicode):
            print str_key + ':' + str_comment
            ws[description_cell].value = str_comment
            ws[id_cell].value = str_key
            ws[id_cell].border = thin_border
            ws[id_cell].alignment = ws[id_cell].alignment.copy(wrapText=True)
            ws[description_cell].border = thin_border
            ws[en_cell].value = item['value']
            ws[en_cell].border = thin_border
            ws[en_cell].alignment = ws[vi_cell].alignment.copy(wrapText=True)
            if vi_strings != None:
                try:
                    ws[vi_cell].value = findValue(vi_strings, str_key)
                except:
                    # Key missing from the Vietnamese file: flag the cell red.
                    haveError = True
                    redFill = PatternFill(start_color='FFFF0000',
                                          end_color='FFFF0000',
                                          fill_type='solid')
                    ws[vi_cell].fill = redFill
                ws[vi_cell].border = thin_border
                ws[vi_cell].alignment = ws[en_cell].alignment.copy(wrapText=True)
            if ja_strings != None:
                ws[ja_cell].value = findValue(ja_strings, str_key)
                ws[ja_cell].border = thin_border
                ws[ja_cell].alignment = ws[ja_cell].alignment.copy(wrapText=True)
    if haveError:
        # Red tab signals at least one missing Vietnamese translation.
        ws.sheet_properties.tabColor = 'FF0000'
# NOTE(review): this chunk begins mid-function -- the lines down to
# `return result` are the tail of a translation-applying loop whose
# definition lies outside this view; the indentation is reconstructed
# and should be confirmed against the original file.
        item = l.copy()
        if trans:
            # Wrap the translated value with the surrounding prefix/suffix.
            item['value'] = prefix + trans + suffix
        result.append(item)
    return result


def load_source(filename):
    """Load a translation source: .po via polib, .strings via localizable.

    Returns (parsed_entries, key_map). Asserts on any other extension.
    """
    if filename.endswith('.po'):
        l = polib.pofile(filename)
        m = create_po_map(l)
    elif filename.endswith('.strings'):
        l = localizable.parse_strings(filename=filename)
        m = create_strings_map(l)
    else:
        assert False, filename
    return l, m


if __name__ == '__main__':
    # Usage: script <strings-to-update> <source-1> <source-2>
    f1 = sys.argv[1]
    f2 = sys.argv[2]
    f3 = sys.argv[3]
    s = localizable.parse_strings(filename=f1)
    l1, m1 = load_source(f2)
    l2, m2 = load_source(f3)
    # `apply` is a project-local function here (it shadows the Py2 builtin).
    r = apply(s, m1, m2)
    print(localizable.write_strings(r).encode('utf-8'))
def load_lproj_strings(strings):
    """Parse the Apple .strings file at path *strings* and return a
    key -> value dict (later duplicate keys overwrite earlier ones).
    """
    parsed = localizable.parse_strings(filename=strings)
    return {entry['key']: entry['value'] for entry in parsed}