def __init__(self, target, lang):
    """Remember the output target and language, and start an empty PO catalog."""
    # Fresh catalog that extracted entries will be appended to.
    self.po = polib.POFile()
    # Language code of the catalog being produced.
    self.lang = lang
    # Destination object supplied by the caller (presumably a writable
    # buffer the results are emitted through -- confirm against callers).
    self.buffer = target
def run(self):
    """Extract translatable text from the selected libraries into .pot files.

    For each lib named in ``self.args.libs`` this walks the lib's documents
    (element text and translatable attributes) and its 'moya' templates,
    collects msgids with occurrence info, writes/merges a
    ``<long_name>.pot`` under the lib's translations location, ensures the
    per-language LC_MESSAGES directories exist, and finally prints a summary
    table to the console.

    Returns -1 on a named-lib lookup failure or when the message file exists
    and neither --merge nor --overwrite was given; otherwise returns None.
    """
    args = self.args
    # Build the application without autoreload; we only need its archive.
    application = WSGIApplication(self.location,
                                  self.get_settings(),
                                  disable_autoreload=True,
                                  master_settings=self.master_settings)
    archive = application.archive
    try:
        libs = [archive.libs[lib_name] for lib_name in args.libs]
    except KeyError:
        # NOTE(review): relies on the comprehension variable `lib_name`
        # leaking out of the list comprehension -- valid only on Python 2
        # (consistent with `itervalues`/`unicode` below); on Python 3 this
        # line would raise NameError.
        self.console.error(
            'No lib with name "{}" installed'.format(lib_name))
        return -1
    table = []
    for lib in libs:
        # Template strings already seen, keyed by (path, line, col, text, plural),
        # so duplicated {% trans %} blocks are only recorded once.
        template_text = set()
        # Accumulates POEntry keyword dicts keyed by (text, plural, attr, context);
        # the defaultdict seeds each new entry with an empty occurrences list.
        extract_text = defaultdict(lambda: {"occurrences": []})
        if not lib.translations_location:
            # Lib has no translations dir configured; flag it and move on.
            table.append([
                lib.long_name,
                Cell("translations not enabled", fg="red", bold=True),
                ''
            ])
            continue
        filename = "{}.pot".format(lib.long_name.replace('.', '_'))
        translations_dir = lib.load_fs.getsyspath(
            lib.translations_location)

        def add_text(path, line, text, comment=None, plural=None,
                     attr=None, context=None):
            # Record one occurrence of a translatable string.  Occurrence
            # paths are stored relative to the translations directory.
            rel_path = os.path.relpath(path, translations_dir)
            entry = extract_text[(text, plural, attr, context)]
            # NOTE(review): this condition looks inverted -- it overwrites a
            # caller-supplied context only when BOTH attr and context are
            # set, and in the visible callers attr and context are never
            # passed together, so the branch never fires.  Presumably
            # `context is None` was intended; confirm before changing.
            if attr is not None and context is not None:
                context = "attribute '{}'".format(attr)
            if plural is not None:
                # Plural entry: seed empty singular/plural msgstr slots.
                entry['msgid'] = text
                entry['msgid_plural'] = plural
                entry['msgstr_plural'] = {'0': '', '1': ''}
            else:
                entry['msgid'] = text
            if context is not None:
                entry['msgctxt'] = context
            entry['occurrences'].append((rel_path, line))
            if comment is not None:
                entry['comment'] = comment

        # Pass 1: extract from the lib's XML documents.
        with self.console.progress("extracting {}".format(lib),
                                   len(lib.documents)) as progress:
            for doc in lib.documents:
                progress.step()
                for element in doc.elements.itervalues():
                    # Element body text marked translatable.
                    if element._translate_text:
                        text = element._text.strip()
                        if text:
                            add_text(element._location,
                                     element.source_line,
                                     text,
                                     comment=unicode(element))
                    # Attributes flagged translatable, either on the
                    # attribute definition or via the element's whitelist.
                    for name, attribute in element._tag_attributes.items():
                        if attribute.translate or name in element._translatable_attrs:
                            text = element._attrs.get(name, '').strip()
                            if text:
                                add_text(
                                    element._location,
                                    element.source_line,
                                    text,
                                    attr=name,
                                    comment="attribute '{}' of {}".format(
                                        name, unicode(element)))
        # Pass 2: extract from the lib's templates, if it has any.
        if 'location' in lib.templates_info:
            engine = archive.get_template_engine('moya')
            with lib.load_fs.opendir(lib.templates_info['location']
                                     ) as templates_fs:
                for path in templates_fs.walkfiles():
                    # Prefer a real system path for occurrence reporting;
                    # fall back to the virtual path.
                    sys_path = templates_fs.getsyspath(
                        path, allow_none=True) or path
                    contents = templates_fs.getcontents(path)
                    template = Template(contents, path)
                    template.parse(engine.env)
                    for trans_text in template.translatable_text:
                        line, start, end = trans_text.location
                        text = trans_text.text
                        comment = trans_text.comment
                        plural = trans_text.plural
                        translatable_text = (path, line, start, text, plural)
                        if translatable_text not in template_text:
                            add_text(sys_path, line, text, comment,
                                     plural=plural,
                                     context=trans_text.context)
                            template_text.add(translatable_text)
        now = pytz.UTC.localize(datetime.utcnow())
        po = polib.POFile()
        # Each accumulated dict maps directly onto POEntry keyword args.
        for text in extract_text.values():
            po.append(polib.POEntry(**text))
        po.metadata = {
            'POT-Creation-Date': now.strftime('%Y-%m-%d %H:%M%z'),
            'Project-Id-Version': lib.version,
            'Language': lib.default_language or 'en',
            'MIME-Version': '1.0',
            'Content-Type': 'text/plain; charset=utf-8',
            'Content-Transfer-Encoding': '8Bit',
            'Plural-Forms': 'nplurals=2; plural=(n != 1);'
        }
        if lib.translations_location:
            lib.load_fs.makedir(lib.translations_location,
                                allow_recreate=True)
            translations_location = lib.load_fs.getsyspath(
                lib.translations_location)
            translation_path = os.path.join(translations_location, filename)
            if os.path.exists(translation_path) and not args.overwrite:
                # File already exists: refuse unless --merge was requested.
                if not args.merge:
                    self.console.error(
                        'message file "{}" exists, see --merge or --overwrite options'
                        .format(filename))
                    return -1
                existing_po = polib.pofile(translation_path)
                po.merge(existing_po)
                po.save(translation_path)
            else:
                po.save(translation_path)
            # Make sure each language has its LC_MESSAGES directory.
            locale_fs = lib.load_fs.opendir(lib.translations_location)
            for lang in lib.languages:
                locale_fs.makeopendir("{}/LC_MESSAGES/".format(lang),
                                      recursive=True)
        table.append([
            lib.long_name,
            Cell(pathjoin(lib.translations_location, filename),
                 fg="green", bold=True),
            Cell(len(po), bold=True)
        ])
    self.console.table(table, header_row=["lib", "file",
                                          "no. strings"])
# NOTE(review): this chunk is cut out of a larger script at BOTH ends.  The
# element-handling block below is the tail of a "for node in nodes:" loop
# whose header only appears at the bottom of this fragment, and it references
# names defined elsewhere (langid, translatedStrings, sourcefile,
# transculture).  Code kept byte-identical; only comments added.
if node.nodeType == node.ELEMENT_NODE:
    if node.tagName == "String":
        stringId = node.getAttribute("Id")
        # Skip the entry that carries the language id itself, discarding
        # any comment collected for it.
        if stringId == langid:
            comment = ""
            continue
        stringContent = node.firstChild.data
        translatedStrings[stringId] = stringContent

# Parse the WiX localization source file and read its culture/codepage.
doc = minidom.parse(sourcefile)
wixloc = doc.getElementsByTagName("WixLocalization")[0]
culture = wixloc.getAttribute("Culture")
codepage = wixloc.getAttribute("Codepage")

# wrapwidth=0 disables line wrapping in the generated PO output.
po = polib.POFile(wrapwidth=0)
po.metadata = {
    'MIME-Version': '1.0',
    'Content-Type': 'text/plain; charset=utf-8',
    'Content-Transfer-Encoding': '8bit',
    'Language': transculture
}

# Walk the top-level children of <WixLocalization>; an XML comment node is
# remembered so it can accompany the <String> element that follows it.
root = doc.documentElement
nodes = root.childNodes
comment = ""
for node in nodes:
    if node.nodeType == node.COMMENT_NODE:
        comment = node.data
def __init__(self):
    """Command-line entry point for cinnamon-json-makepot.

    Parses options and either installs/removes compiled translations
    (-i / -r), or extracts translatable strings from metadata.json and
    settings-schema.json (optionally JS files too, via xgettext) into the
    named .pot file.  Python 2 code (print statements).
    """
    usage = """Usage: cinnamon-json-makepot -i | -r | [-js] <potfile name>

    -js, --js - Runs xgettext on any javascript files in your directory before
    scanning the settings-schema.json file.  This allows you to generate a .pot
    file for your entire applet at once.

    *** The following two options should only be run in your applet, desklet,
    or extension's directory ***

    -i, --install - Compiles and installs any .po files contained in a po folder
    to the system locale store.  Use this option to test your translations
    locally before uploading to Spices.  It will use the applet, desklet, or
    extension UUID as the translation domain

    -r, --remove - The opposite of install, removes translations from the store.
    Again, it uses the UUID to find the correct files to remove

    <potfile name> - name of the .pot file to work with.  This can be
    pre-existing, or the name of a new file to use.  If you leave off the .pot
    extension, it will be automatically appended to the file name.

    For instance:

    cinnamon-json-makepot myapplet

    Will generate a file called myapplet.pot, or append to a file of that name.
    This can then be used by translators to be made into a po file.

    For example:

    msginit --locale=fr --input=myapplet.pot

    Will create "fr.po" for the French language.  A translator can use a
    utility such as poedit to add translations to this file, or edit the file
    manually.

    .po files can be added to a "po" folder in your applet's directory, and
    will be compiled and installed into the system when the applet is installed
    via Cinnamon Settings.
    """
    parser = OptionParser(usage=usage)
    parser.add_option("-j", "--js", action="store_true", dest="js",
                      default=False)
    parser.add_option("-i", "--install", action="store_true", dest="install",
                      default=False)
    parser.add_option("-r", "--remove", action="store_true", dest="remove",
                      default=False)
    (options, args) = parser.parse_args()

    # install/remove are handled first; note they do not exit here, the
    # do_* helpers are expected to terminate or fall through deliberately.
    if options.install:
        self.do_install()
    if options.remove:
        self.do_remove()

    if not args:
        parser.print_help()
        quit()

    # Normalize the target file name: ensure a .pot suffix and derive the
    # gettext domain from the stem.
    self.potname = args[0]
    if not self.potname.endswith(".pot"):
        self.potname = self.potname + ".pot"
    self.domain = self.potname.replace(".pot", "")
    self.potpath = os.path.join(os.getcwd(), self.potname)

    if options.js:
        # Probe for xgettext; subprocess.call also prints its version banner.
        try:
            import subprocess
            subprocess.call(["xgettext", "--version"])
        except OSError:
            print "xgettext not found, you may need to install the gettext package"
            quit()
        print " "
        print "Running xgettext on JavaScript files..."
        # Collect .js paths into a temp file, then feed it to xgettext.
        # NOTE(review): try/finally here means xgettext runs even if the
        # find command fails -- presumably intended as "always clean up",
        # but it reads oddly; confirm the original intent.
        tmp = tempfile.NamedTemporaryFile(prefix="cinnamon-json-makepot-")
        try:
            os.system('find . -iname "*.js" > %s' % tmp.name)
        finally:
            os.system(
                "xgettext --language=C --keyword=_ --output=%s --files-from=%s"
                % (self.potname, tmp.name))

    self.current_parent_dir = ""

    # Append to an existing .pot (pofile remembers its own path) or start
    # a brand new catalog.
    append = False
    if os.path.exists(self.potpath):
        append = True

    if append:
        self.po = polib.pofile(self.potpath)
    else:
        self.po = polib.POFile()

    print "Scanning metadata.json and settings-schema.json..."
    self.scan_dirs()

    if append:
        self.po.save()
    else:
        self.po.save(fpath=self.potpath)
    print "Extraction complete"
    quit()
def _overload_terms(self, language, verbosity, polib, file_name, *args):
    """Build/update an overload catalog from app .po files matching *args terms.

    Existing catalog entries are first marked obsolete; entries found again
    in the per-app .po files are revived (occurrences replaced), and entries
    seen multiple times have their occurrences accumulated.  NEW msgids are
    only added when they contain one of the given search terms, and are
    flagged fuzzy so translators revisit them.

    @param language: locale code of the catalog to write
    @param verbosity: Django-style verbosity level (>=1 prints progress)
    @param polib: the polib module, injected by the caller
    @param file_name: catalog domain/file name
    @param args: one or more search terms (at least one required)
    """
    if not args:
        raise CommandError('please give at least one term')
    catalog_entries = {}
    catalog_dirpath, catalog_path = self._get_catalog_paths(language, file_name)
    all_plural_forms = set()
    if exists(catalog_path):
        # Existing catalog: mark everything obsolete up-front; entries that
        # are re-discovered below get un-obsoleted.
        catalog = polib.pofile(catalog_path)
        for entry in catalog.translated_entries():
            entry.obsolete = True
            catalog_entries[entry.msgid] = entry
    else:
        # Fresh catalog: create the directory if needed and seed standard
        # gettext header metadata.
        if verbosity >= 1:
            self.stdout.write('Create catalog at {}'.format(catalog_path))
        if not exists(catalog_dirpath):
            makedirs(catalog_dirpath)
        elif not isdir(catalog_dirpath):
            self.stderr.write('"{}" exists and is not a directory.'.format(catalog_dirpath))
            return
        catalog = polib.POFile()
        catalog.metadata = {
            'Project-Id-Version': 'PACKAGE VERSION',
            'Report-Msgid-Bugs-To': '',
            'PO-Revision-Date': 'YEAR-MO-DA HO:MI+ZONE',
            'Last-Translator': 'FULL NAME <EMAIL@ADDRESS>',
            'Language-Team': 'LANGUAGE <*****@*****.**>',
            'MIME-Version': '1.0',
            'Content-Type': 'text/plain; charset=UTF-8',
            'Content-Transfer-Encoding': '8bit',
        }
    # Stamp the creation date in the configured local timezone (also
    # refreshed when the catalog already existed).
    catalog.metadata['POT-Creation-Date'] = pytz.timezone(settings.TIME_ZONE) \
                                                .localize(datetime.now()) \
                                                .strftime('%Y-%m-%d %H:%M%z')
    terms = [smart_text(arg) for arg in args]
    entry_count = 0
    for app_pofile in self._iter_pofiles(language, polib, file_name):
        # Remember every distinct Plural-Forms header we encounter so a new
        # catalog can adopt one at the end.
        plural_forms = app_pofile.metadata.get('Plural-Forms')
        if plural_forms:
            all_plural_forms.add(plural_forms)
        for entry in app_pofile.translated_entries():
            entry_count += 1
            msgid = entry.msgid
            existing_entry = catalog_entries.get(msgid)
            # TODO: manage context (key=msgid + context ?)
            if existing_entry is not None:
                if existing_entry.obsolete:  # Entry has not been updated yet
                    existing_entry.obsolete = False
                    existing_entry.occurrences = entry.occurrences
                else:
                    existing_entry.occurrences += entry.occurrences
            else:
                # New msgid: only keep it if it matches one of the terms.
                for term in terms:
                    if term in msgid:  # TODO: what about case sensitivity ?
                        # Flag fuzzy so the overloaded translation is reviewed.
                        entry.flags.append('fuzzy')
                        catalog.append(entry)
                        catalog_entries[entry.msgid] = entry
                        break
    if not catalog.fpath and all_plural_forms:
        # Creation of the file: adopt one of the discovered plural forms.
        if len(all_plural_forms) > 1:
            self.stderr.write('Different information about plural forms were found (first one used):{}'.format(
                    ''.join('\n - {}'.format(i) for i in all_plural_forms)
                )
            )
        catalog.metadata['Plural-Forms'] = next(iter(all_plural_forms))
    catalog.save(catalog_path)
    if verbosity >= 1:
        self.stdout.write('Number of examined entries: {}'.format(entry_count))
def help_test_check_for_translation_updates(
    self, first_time, changed, resource_exists=True, language_exists=True
):
    """
    Helper to test several conditions, since all the setup is so convoluted.

    Exercises check_for_translation_updates_with_repo_and_legalcodes with
    every Transifex-side state combination:

    @param first_time: True if no previous translation-update time is known
    @param changed: True if Transifex reports a newer activity timestamp
    @param resource_exists: False to simulate the resource missing on the
        first stats call (it must then be created)
    @param language_exists: False to simulate the language missing on the
        first stats call
    """
    language_code = "zh-Hans"
    license = LicenseFactory(version="4.0", license_code="by-nd")

    first_translation_update_datetime = datetime.datetime(
        2007, 1, 25, 12, 0, 0, tzinfo=utc
    )
    changed_translation_update_datetime = datetime.datetime(
        2020, 9, 30, 13, 11, 52, tzinfo=utc
    )

    if first_time:
        # We don't yet know when the last update was.
        legalcode_last_update = None
    else:
        # The last update we know of was at this time.
        legalcode_last_update = first_translation_update_datetime

    legalcode = LegalCodeFactory(
        license=license,
        language_code=language_code,
        translation_last_update=legalcode_last_update,
    )
    resource_slug = license.resource_slug

    # Will need an English legalcode if we need to create the resource
    if not resource_exists and language_code != DEFAULT_LANGUAGE_CODE:
        LegalCodeFactory(
            license=license,
            language_code=DEFAULT_LANGUAGE_CODE,
        )

    # 'timestamp' returns on translation stats from transifex
    if changed:
        # now it's the newer time
        timestamp = changed_translation_update_datetime.isoformat()
    else:
        # it's still the first time
        timestamp = first_translation_update_datetime.isoformat()

    mock_repo = MagicMock()
    mock_repo.is_dirty.return_value = False

    legalcodes = [legalcode]
    dummy_repo = DummyRepo("/trans/repo")

    # A couple of places use git.Repo(path) to get a git repo object. Have them
    # all get back our same dummy repo.
    # NOTE(review): dummy_repo_factory is defined but not visibly used in
    # this chunk -- presumably patched in elsewhere; confirm it is still
    # needed.
    def dummy_repo_factory(path):
        return dummy_repo

    helper = TransifexHelper()

    # Patch out everything with network or filesystem side effects.
    with mpo(
        helper, "handle_legalcodes_with_updated_translations"
    ) as mock_handle_legalcodes, mpo(
        helper, "get_transifex_resource_stats"
    ) as mock_get_transifex_resource_stats, mpo(
        helper, "create_resource"
    ) as mock_create_resource, mpo(
        LegalCode, "get_pofile"
    ) as mock_get_pofile, mpo(
        helper, "upload_messages_to_transifex"
    ) as mock_upload:
        if resource_exists:
            if language_exists:
                mock_get_transifex_resource_stats.return_value = {
                    resource_slug: {
                        language_code: {
                            "translated": {
                                "last_activity": timestamp,
                            }
                        }
                    }
                }
            else:
                # language does not exist first time, does the second time
                mock_get_transifex_resource_stats.side_effect = [
                    {resource_slug: {}},
                    {
                        resource_slug: {
                            language_code: {
                                "translated": {
                                    "last_activity": timestamp,
                                }
                            }
                        }
                    },
                ]
        else:
            # First time does not exist, second time does
            mock_get_transifex_resource_stats.side_effect = [
                {},
                {
                    resource_slug: {
                        language_code: {
                            "translated": {
                                "last_activity": timestamp,
                            }
                        }
                    }
                },
            ]
        # Will need pofile
        mock_get_pofile.return_value = polib.POFile()
        helper.check_for_translation_updates_with_repo_and_legalcodes(
            dummy_repo, legalcodes
        )

    if not resource_exists:
        # Should have tried to create resource
        mock_create_resource.assert_called_with(
            resource_slug=resource_slug,
            resource_name=legalcode.license.fat_code(),
            pofilename=os.path.basename(legalcode.translation_filename()),
            pofile_content=get_pofile_content(mock_get_pofile.return_value),
        )
    else:
        # Not
        mock_create_resource.assert_not_called()

    if language_exists:
        mock_upload.assert_not_called()
    else:
        mock_upload.assert_called()

    mock_get_transifex_resource_stats.assert_called_with()
    legalcode.refresh_from_db()
    if changed:
        # we mocked the actual processing, so the stored timestamp is
        # unchanged even though updated legalcodes were handed off.
        self.assertEqual(
            first_translation_update_datetime,
            legalcode.translation_last_update
        )
        mock_handle_legalcodes.assert_called_with(dummy_repo, [legalcode])
    else:
        self.assertEqual(
            first_translation_update_datetime,
            legalcode.translation_last_update
        )
        mock_handle_legalcodes.assert_called_with(dummy_repo, [])
    return
# NOTE(review): top-level script fragment -- loadManualFile is defined
# elsewhere, and the body of the final for-loop continues beyond this chunk.
# Code kept byte-identical; only comments added.
manualData = loadManualFile(sys.argv[3])

# Load the JSON dataset named by the first CLI argument.
dataset = sys.argv[1]
json_data = open(dataset)
data = json.load(json_data)
json_data.close()
displayInConsole = False

# Get last modification time of data set; it becomes the POT
# creation/revision timestamp (UTC, gettext format).
modtime = os.path.getmtime(dataset)
modtime_utc = datetime.datetime.utcfromtimestamp(modtime)
modtime_utc_string = modtime_utc.strftime('%Y-%m-%d %H:%M') + '+0000'

# Header
# NOTE(review): some metadata values carry stray newline artifacts -- a
# literal backslash-n in 'gcompris_qt\\n' and real trailing newlines in the
# translator fields.  polib metadata values are normally plain strings;
# confirm against the produced .pot before cleaning up.
po = polib.POFile()
po.metadata = {
    'Project-Id-Version': 'gcompris_qt\\n',
    'Report-Msgid-Bugs-To': 'https://bugs.kde.org/enter_bug.cgi?product=gcompris',
    'POT-Creation-Date': modtime_utc_string,
    'PO-Revision-Date': modtime_utc_string,
    'Last-Translator': 'FULL NAME <EMAIL@ADDRESS>\n',
    'Language-Team': 'LANGUAGE <*****@*****.**>\n',
    'MIME-Version': '1.0',
    'Content-Type': 'text/plain; charset=utf-8',
    'Content-Transfer-Encoding': '8bit',
}

# Walk chapters/lessons of the dataset (loop body continues past this chunk).
for chapter in data:
    for lesson in chapter['content']:
def convert(self):
    """Convert the TMX file at ``self.input_file`` into a PO catalog.

    English ('en'/'en-us') segments become msgids, Catalan ('ca') segments
    become msgstrs.  Units missing either side are skipped; a duplicated
    source string receives a numeric msgctxt so both entries survive.  The
    result is saved to ``self.output_file``.
    """
    XML_LANG = '{http://www.w3.org/XML/1998/namespace}lang'

    catalog = polib.POFile()
    catalog.metadata = {
        'Project-Id-Version': '1.0',
        'Report-Msgid-Bugs-To': 'none',
        'POT-Creation-Date': '2007-10-18 14:00+0100',
        'PO-Revision-Date': '2007-10-18 14:00+0100',
        'Last-Translator': '*****@*****.**',
        'Language-Team': 'Catalan <*****@*****.**>',
        'MIME-Version': '1.0',
        'Content-Type': 'text/plain; charset=utf-8',
        'Content-Transfer-Encoding': '8bit',
        'Plural-Forms': 'nplurals=2; plural=n != 1;',
    }

    seen_sources = set()
    appended = 0
    root = ET.parse(self.input_file).getroot()

    for unit in root.iter('tu'):
        # A non-empty tuid attribute is preserved as a translator comment.
        tuid = unit.attrib.get('tuid')
        unit_comment = 'id: {0}'.format(tuid) if tuid else None

        source = ''
        translation = ''
        for variant in unit:
            if variant.tag != 'tuv':
                continue
            # The language may appear namespaced (xml:lang) or bare.
            if XML_LANG in variant.attrib:
                lang_code = variant.attrib[XML_LANG].lower()
            else:
                lang_code = variant.attrib['lang'].lower()
            for segment in variant.iter('seg'):
                if lang_code in ('en', 'en-us'):
                    source = segment.text
                elif lang_code == 'ca':
                    translation = segment.text

        # Drop units lacking either the source or the translation.
        if not source:
            continue
        if not translation:
            continue

        # Repeated msgids get a unique context so polib keeps every entry.
        context = str(appended) if source in seen_sources else None
        seen_sources.add(source)

        catalog.append(polib.POEntry(msgid=source,
                                     msgstr=translation,
                                     msgctxt=context,
                                     tcomment=unit_comment))
        appended = appended + 1

    catalog.save(self.output_file)
def save_to_pofile(self, path, translations, potcontent, create_skeletons,
                   locale, doc_name):
    """
    Save PO file to path, based on json objects of pot and translations.

    Builds the entries from the POT json, forces the Content-Type charset
    to UTF-8, then (if translations are present) fills in msgstr /
    msgstr_plural per entry matched by resId, marking NeedReview entries
    fuzzy.  Finally writes the file to ``path``.

    @param translations: json object of the translated content from server
        (may be falsy when no translation exists)
    @param path: output path of the po file
    @param potcontent: json object of the pot retrieved from the server
    @param create_skeletons: when True, write a skeleton file even if there
        are no translated targets
    @param locale: locale name, used for logging only
    @param doc_name: document name, used for logging only
    """
    po = polib.POFile(fpath=path)
    # pylint: disable=E1103
    textflows = potcontent.get('textFlows')
    if potcontent.get('extensions'):
        # First extension block carries the PO header comment and metadata.
        extensions = potcontent.get('extensions')[0]
        po.header = extensions.get('comment')
        for item in extensions.get('entries'):
            po.metadata[item['key']] = item['value']

    # Force the Content-Type charset to UTF-8.
    # BUGFIX: re.sub returns a NEW string (str is immutable); the original
    # code discarded the result, so an existing header kept its old charset.
    pattern = r'charset=[^;]*'
    if 'Content-Type' in po.metadata:
        po.metadata['Content-Type'] = re.sub(
            pattern, "charset=UTF-8", po.metadata['Content-Type'])
    else:
        po.metadata['Content-Type'] = "text/plain; charset=UTF-8"

    for textflow in textflows:
        poentry = polib.POEntry(occurrences=None)
        poentry.msgid = textflow.get('content')
        if textflow.get('extensions'):
            entry_list = textflow.get('extensions')
            for entry in entry_list:
                if entry.get('object-type') == 'pot-entry-header':
                    # PotEntryHeader: references, flags and context.
                    # Check the references is not empty
                    if entry.get('references') != [u'']:
                        ref_list = []
                        for item in entry.get('references'):
                            # in some cases, entry contains more than one
                            # space-separated reference
                            if ' ' in item:
                                reference = item.split(' ')
                                for i in reference:
                                    ref_list.append(tuple(i.rsplit(':', 1)))
                            else:
                                ref_list.append(tuple(item.rsplit(':', 1)))
                        poentry.occurrences = ref_list
                    else:
                        poentry.occurrences = None
                    if entry.get('flags'):
                        poentry.flags = entry.get('flags')
                    if entry.get('context') is not None:
                        poentry.msgctxt = entry.get('context')
                if entry.get('object-type') == 'comment':
                    # SimpleComment
                    poentry.comment = entry.get('value')
        if textflow.get('contents'):
            # Plural entry: contents holds [singular, plural] msgids.
            poentry.msgid = textflow.get('contents')[0]
            poentry.msgid_plural = textflow.get('contents')[1]
            poentry.msgstr_plural[0] = ''
        else:
            poentry.msgstr = ''
        po.append(poentry)

    # If the translation exists, merge its content into the entries.
    if translations:
        content = translations
        # "extensions":[{"object-type":"po-target-header",
        #   "comment":"comment_value",
        #   "entries":[{"key":"ht","value":"vt1"}]}]
        if content.get('extensions'):
            ext = content.get('extensions')[0]
            header_comment = ext.get('comment')
            if header_comment:
                po.header = header_comment
            for item in ext.get('entries'):
                po.metadata[item['key']] = item['value']

        targets = content.get('textFlowTargets')
        if not create_skeletons:
            if not targets:
                self.log.warn(
                    "No translations found in %s for document %s"
                    % (locale, doc_name))
                return

        # Index the translated targets by resId for O(1) lookup per entry.
        translationsByResId = {}
        for translation in targets:
            resId = translation.get('resId')
            translationsByResId[resId] = translation

        for poentry in po:
            resId = self.get_resId(poentry)
            translation = translationsByResId.get(resId)
            if translation:
                # "extensions":[{"object-type":"comment",
                #   "value":"testcomment","space":"preserve"}]
                if translation.get('extensions'):
                    extensions = translation.get('extensions')
                    for entry in extensions:
                        if entry.get('object-type') == 'comment':
                            if entry.get('value'):
                                poentry.tcomment = entry.get('value')
                content = translation.get('content')
                if poentry.msgid_plural:
                    contents = translation.get('contents')
                    if contents:
                        for i, msg in enumerate(contents):
                            poentry.msgstr_plural[i] = msg
                    elif content:
                        # Fall back to the singular content for form 0.
                        poentry.msgstr_plural[0] = content
                else:
                    if content:
                        poentry.msgstr = content
                if translation.get('state') == 'NeedReview':
                    # Mark needs-review entries fuzzy.
                    if poentry.flags == [u'']:
                        poentry.flags = ['fuzzy']
                    else:
                        poentry.flags.insert(0, 'fuzzy')
                else:
                    # NOTE(review): setting flags to None (rather than [])
                    # for an empty flag list is unusual for polib -- confirm
                    # the writer tolerates it before changing.
                    if poentry.flags == [u'']:
                        poentry.flags = None

    # finally save resulting po to outpath as lang/myfile.po
    po.save()
    # pylint: disable=E1103
    self.log.info("Writing po file to %s" % (path))