def MakeStaticTranslation(self, original_filename, encoding):
  '''Creates a "static" translation of an original structure file.

  Given the name of the structure type (self.structure_type), the filename
  of the file holding the original structure, and optionally the "section"
  key identifying the part of the file to look at (self.section), creates a
  temporary file holding a "static" translation of the original structure
  (i.e. one where all translateable parts have been replaced with "TTTTTT")
  and returns the temporary file name.  It is the caller's responsibility
  to delete the file when finished.

  Args:
    original_filename: 'c:\\bingo\\bla.rc'
    encoding: encoding used to read the original file.

  Return:
    'c:\\temp\\werlkjsdf334.tmp'
  '''
  import os  # local import: keeps this fix self-contained in the block
  original = structure._GATHERERS[self.structure_type](
      original_filename, extkey=self.section, encoding=encoding)
  original.Parse()
  translated = original.Translate(constants.CONSTANT_LANGUAGE, False)
  # mkstemp() atomically creates the file, avoiding the symlink race of the
  # deprecated mktemp().  We close the raw descriptor and reopen by name so
  # the wrapped stream controls the file's text mode and lifetime.
  fd, fname = tempfile.mkstemp()
  os.close(fd)
  with util.WrapOutputStream(open(fname, 'w')) as writer:
    writer.write("Original filename: %s\n=============\n\n" %
                 original_filename)
    writer.write(translated)  # write in UTF-8
  return fname
def Run(self, opts, args):
  '''Converts the single .rc file named in args to a .grd file next to it.

  Args:
    opts: global options object passed to SetOptions().
    args: tool-specific arguments; must contain exactly one path.

  Return:
    2 on usage error, None on success.
  '''
  args = self.ParseOptions(args)
  if len(args) != 1:
    print(
        'This tool takes a single tool-specific argument, the path to the\n'
        '.rc file to process.')
    return 2
  self.SetOptions(opts)
  path = args[0]
  # Output goes next to the input, same basename, '.grd' extension.
  out_path = os.path.join(
      util.dirname(path),
      os.path.splitext(os.path.basename(path))[0] + '.grd')
  # open() replaces the removed-in-Python-3 file() builtin; try/finally
  # guarantees the streams are closed even if read/Process/write raises.
  rcfile = util.WrapInputStream(open(path, 'r'), self.input_encoding)
  try:
    rctext = rcfile.read()
  finally:
    rcfile.close()
  grd_text = unicode(self.Process(rctext, path))
  outfile = util.WrapOutputStream(open(out_path, 'w'), 'utf-8')
  try:
    outfile.write(grd_text)
  finally:
    outfile.close()
  print('Wrote output file %s.\nPlease check for TODO items in the file.'
        % out_path)
def Run(self, globopt, args):
  '''Extracts translations by diffing an original and a translated RC file.

  Args:
    globopt: global options, passed to Setup().
    args: [original_rc_path, translated_rc_path, output_path]

  Return:
    2 on usage error, None on success.
  '''
  args = self.Setup(globopt, args)
  if len(args) != 3:
    self.Out('This tool takes exactly three arguments:\n'
             '  1. The path to the original RC file\n'
             '  2. The path to the translated RC file\n'
             '  3. The output file path.\n')
    return 2
  grd = grd_reader.Parse(self.o.input, debug=self.o.extra_verbose)
  grd.RunGatherers(recursive=True)
  # open() replaces the removed-in-Python-3 file() builtin; try/finally
  # closes both inputs even if ExtractTranslations raises.
  source_rc = util.WrapInputStream(open(args[0], 'r'),
                                   self.rc2grd.input_encoding)
  transl_rc = util.WrapInputStream(open(args[1], 'r'),
                                   self.rc2grd.input_encoding)
  try:
    translations = self.ExtractTranslations(grd,
                                            source_rc.read(), args[0],
                                            transl_rc.read(), args[1])
  finally:
    transl_rc.close()
    source_rc.close()
  output_file = util.WrapOutputStream(open(args[2], 'w'))
  try:
    self.WriteTranslations(output_file, translations.items())
  finally:
    output_file.close()
  self.Out('Wrote output file %s' % args[2])
def Run(self, global_options, my_arguments):
  '''Writes _FILE_CONTENTS to the single output file named in my_arguments.

  Args:
    global_options: unused here.
    my_arguments: must contain exactly one element, the output file name.

  Return:
    2 on usage error, None on success.
  '''
  # `len(x) != 1` instead of the original `not len(x) == 1`; print() form
  # matches the newer tools in this codebase and is Python 2/3 compatible.
  if len(my_arguments) != 1:
    print('This tool requires exactly one argument, the name of the output '
          'file.')
    return 2
  filename = my_arguments[0]
  with util.WrapOutputStream(open(filename, 'w'), 'utf-8') as out:
    out.write(_FILE_CONTENTS)
  print("Wrote file %s" % filename)
def FileForLanguage(self, lang, output_dir, create_file=True,
                    return_if_not_generated=True):
  '''Returns the filename of the file associated with this structure,
  for the specified language.

  Args:
    lang: 'fr'
    output_dir: 'c:\temp'
    create_file: True
  '''
  assert self.HasFileForLanguage()
  # If the source language is requested, and no extra changes are requested,
  # use the existing file.
  if (lang == self.GetRoot().GetSourceLanguage() and
      self.attrs['expand_variables'] != 'true' and
      not self.attrs['run_command']):
    if return_if_not_generated:
      return self.GetFilePath()
    else:
      # Caller asked for None when no file needs to be generated.
      return None

  # Output name: explicit 'output_filename' attribute wins; otherwise the
  # basename of the source file, prefixed with the language code.
  if self.attrs['output_filename'] != '':
    filename = self.attrs['output_filename']
  else:
    filename = os.path.basename(self.attrs['file'])
  assert len(filename)
  filename = '%s_%s' % (lang, filename)
  filename = os.path.join(output_dir, filename)

  # Only create the output if it was requested by the call.
  if create_file:
    text = self.gatherer.Translate(
        lang,
        pseudo_if_not_available=self.PseudoIsAllowed(),
        fallback_to_english=self.ShouldFallbackToEnglish(),
        skeleton_gatherer=self.GetSkeletonGatherer())
    # NOTE(review): uses the Python 2-only file() builtin and does not close
    # the stream on exception — candidate for a 'with open(...)' cleanup.
    file_object = util.WrapOutputStream(file(filename, 'wb'),
                                        self._GetOutputEncoding())
    file_contents = util.FixLineEnd(text, self.GetLineEnd())
    if self.ExpandVariables():
      # Note that we reapply substitution a second time here.
      # This is because a) we need to look inside placeholders
      # b) the substitution values are language-dependent
      file_contents = self.GetRoot().substituter.Substitute(file_contents)
    if self._ShouldAddBom():
      # BOM must precede the contents, hence written first.
      file_object.write(constants.BOM)
    file_object.write(file_contents)
    file_object.close()

  if self.attrs['run_command']:
    # Run arbitrary commands after translation is complete so that it
    # doesn't interfere with what's in translation console.
    command = self.attrs['run_command'] % {'filename': filename}
    result = os.system(command)
    assert result == 0, '"%s" failed.' % command
  return filename
def Run(self, globopt, args):
  '''Builds menu translations from an XTB file and writes them in TC format.

  Reads placeholder-free messages from the XTB file, then walks the .grd
  tree looking for <structure type="menu"> nodes and reassembles each menu
  message from the per-part XTB translations.

  Args:
    globopt: global options, passed to SetOptions().
    args: [xtb_file_path, output_file_path]
  '''
  self.SetOptions(globopt)
  assert len(args) == 2, ("Need exactly two arguments, the XTB file and the "
                          "output file")
  xtb_file = args[0]
  output_file = args[1]
  grd = grd_reader.Parse(self.o.input, debug=self.o.extra_verbose)
  grd.OnlyTheseTranslations([])  # don't load translations
  grd.RunGatherers(recursive=True)

  # Maps message id -> joined translation text, placeholder-free only.
  xtb = {}
  def Callback(msg_id, parts):
    msg = []
    for part in parts:
      # part is (is_placeholder, text) — TODO confirm against xtb_reader.
      if part[0]:
        msg = []
        break  # it had a placeholder so ignore it
      else:
        msg.append(part[1])
    if len(msg):
      xtb[msg_id] = ''.join(msg)
  # NOTE(review): uses the Python 2-only file() builtin.
  f = file(xtb_file)
  xtb_reader.Parse(f, Callback)
  f.close()

  translations = []  # list of translations as per transl2tc.WriteTranslations
  for node in grd:
    if node.name == 'structure' and node.attrs['type'] == 'menu':
      assert len(node.GetCliques()) == 1
      message = node.GetCliques()[0].GetMessage()
      translation = []
      contents = message.GetContent()
      for part in contents:
        if isinstance(part, types.StringTypes):
          # Plain text part: look its translation up by generated message id.
          id = grit.extern.tclib.GenerateMessageId(part)
          if id not in xtb:
            print "WARNING didn't find all translations for menu %s" % \
                node.attrs['name']
            # One missing part invalidates the whole menu translation.
            translation = []
            break
          translation.append(xtb[id])
        else:
          # Placeholder part: keep its presentation text as-is.
          translation.append(part.GetPresentation())
      if len(translation):
        translations.append([message.GetId(), ''.join(translation)])

  f = util.WrapOutputStream(file(output_file, 'w'))
  transl2tc.TranslationToTc.WriteTranslations(f, translations)
  f.close()
def FileForLanguage(self, lang, output_dir, create_file=True,
                    return_if_not_generated=True):
  '''Returns the filename of the file associated with this structure,
  for the specified language.

  Args:
    lang: 'fr'
    output_dir: 'c:\temp'
    create_file: True
  '''
  assert self.HasFileForLanguage()
  # For the source language with no variable expansion, the original file
  # can be used directly — nothing needs to be generated.
  if (lang == self.GetRoot().GetSourceLanguage() and
      self.attrs['expand_variables'] != 'true'):
    if return_if_not_generated:
      return self.GetFilePath()
    else:
      return None
  else:
    # Output name: explicit 'output_filename' attribute wins; otherwise the
    # basename of the source file, prefixed with the language code.
    if self.attrs['output_filename'] != '':
      filename = self.attrs['output_filename']
    else:
      filename = os.path.basename(self.attrs['file'])
    assert len(filename)
    filename = '%s_%s' % (lang, filename)
    filename = os.path.join(output_dir, filename)

    if create_file:
      text = self.gatherer.Translate(
          lang,
          pseudo_if_not_available=self.PseudoIsAllowed(),
          fallback_to_english=self.ShouldFallbackToEnglish(),
          skeleton_gatherer=self.GetSkeletonGatherer())
      # NOTE(review): uses the Python 2-only file() builtin and does not
      # close the stream on exception.
      file_object = util.WrapOutputStream(file(filename, 'wb'),
                                          self._GetOutputEncoding())
      file_contents = util.FixLineEnd(text, self.GetLineEnd())
      if self.attrs['expand_variables'] == 'true':
        # Substitute the language code into the generated output.
        file_contents = file_contents.replace('[GRITLANGCODE]', lang)
        # TODO(jennyz): remove this hard coded logic for expanding
        # [GRITDIR] variable for RTL languages when the generic
        # expand_variable code is added by grit team.
        if lang in _RTL_LANGS:
          file_contents = file_contents.replace('[GRITDIR]', 'dir="RTL"')
        else:
          file_contents = file_contents.replace('[GRITDIR]', 'dir="LTR"')
      if self._ShouldAddBom():
        # BOM must precede the contents, hence written first.
        file_object.write(constants.BOM)
      file_object.write(file_contents)
      file_object.close()
    return filename
def Run(self, opts, args):
  '''Writes _FILE_CONTENTS to the single output file named in args.

  Args:
    opts: tool options, passed to ParseOptions().
    args: must contain exactly one element, the output file name.

  Return:
    2 on usage error, None on success.
  '''
  args = self.ParseOptions(args)
  if len(args) != 1:
    # print() form matches the newer tools in this codebase (see the
    # Python 3 variant of Run) and is Python 2/3 compatible.
    print('This tool requires exactly one argument, the name of the output '
          'file.')
    return 2
  filename = args[0]
  with util.WrapOutputStream(open(filename, 'w'), 'utf-8') as out:
    out.write(_FILE_CONTENTS)
  print("Wrote file %s" % filename)
def Run(self, opts, args):
  '''Converts the single .rc file named in args to a .grd file next to it.

  Args:
    opts: global options object passed to SetOptions().
    args: tool-specific arguments; must contain exactly one path.

  Return:
    2 on usage error, None on success.
  '''
  args = self.ParseOptions(args)
  if len(args) != 1:
    print('This tool takes a single tool-specific argument, the path to the\n'
          '.rc file to process.')
    return 2
  self.SetOptions(opts)
  rc_path = args[0]
  # Output goes next to the input: same basename, '.grd' extension.
  base_name = os.path.splitext(os.path.basename(rc_path))[0]
  grd_path = os.path.join(util.dirname(rc_path), base_name + '.grd')
  source_text = util.ReadFile(rc_path, self.input_encoding)
  converted = six.text_type(self.Process(source_text, rc_path))
  with util.WrapOutputStream(open(grd_path, 'wb'), 'utf-8') as out:
    out.write(converted)
  print('Wrote output file %s.\nPlease check for TODO items in the file.' %
        (grd_path,))
def Process(self):
  '''Generates every output file declared by the loaded .grd resource tree.

  For each <output> entry: sets the output language/context on the tree,
  formats the tree into a temp file, then moves/copies the temp file over
  the real output only when needed to avoid unnecessary rebuilds.
  '''
  # Update filenames with those provided by SCons if we're being invoked
  # from SCons.  The list of SCons targets also includes all <structure>
  # node outputs, but it starts with our output files, in the order they
  # occur in the .grd
  if self.scons_targets:
    assert len(self.scons_targets) >= len(self.res.GetOutputFiles())
    outfiles = self.res.GetOutputFiles()
    for ix in range(len(outfiles)):
      outfiles[ix].output_filename = os.path.abspath(
          self.scons_targets[ix])
  else:
    for output in self.res.GetOutputFiles():
      output.output_filename = os.path.abspath(os.path.join(
          self.output_directory, output.GetOutputFilename()))

  # If there are whitelisted names, tag the tree once up front, this way
  # while looping through the actual output, it is just an attribute check.
  if self.whitelist_names:
    self.AddWhitelistTags(self.res, self.whitelist_names)

  for output in self.res.GetOutputFiles():
    self.VerboseOut('Creating %s...' % output.GetOutputFilename())

    # Set the context, for conditional inclusion of resources
    self.res.SetOutputLanguage(output.GetLanguage())
    self.res.SetOutputContext(output.GetContext())
    self.res.SetFallbackToDefaultLayout(output.GetFallbackToDefaultLayout())
    self.res.SetDefines(self.defines)

    # Assign IDs only once to ensure that all outputs use the same IDs.
    if self.res.GetIdMap() is None:
      self.res.InitializeIds()

    # Make the output directory if it doesn't exist.
    self.MakeDirectoriesTo(output.GetOutputFilename())

    # Write the results to a temporary file and only overwrite the original
    # if the file changed.  This avoids unnecessary rebuilds.
    outfile = self.fo_create(output.GetOutputFilename() + '.tmp', 'wb')

    # data_package outputs are raw binary; all others are text and need an
    # encoding wrapper.
    if output.GetType() != 'data_package':
      encoding = self._EncodingForOutputType(output.GetType())
      outfile = util.WrapOutputStream(outfile, encoding)

    # Iterate in-order through entire resource tree, calling formatters on
    # the entry into a node and on exit out of it.
    with outfile:
      self.ProcessNode(self.res, output, outfile)

    # Now copy from the temp file back to the real output, but on Windows,
    # only if the real output doesn't exist or the contents of the file
    # changed.  This prevents identical headers from being written and .cc
    # files from recompiling (which is painful on Windows).
    if not os.path.exists(output.GetOutputFilename()):
      os.rename(output.GetOutputFilename() + '.tmp',
                output.GetOutputFilename())
    else:
      # CHROMIUM SPECIFIC CHANGE.
      # This clashes with gyp + vstudio, which expect the output timestamp
      # to change on a rebuild, even if nothing has changed, so only do
      # it when opted in.
      if not self.write_only_new:
        write_file = True
      else:
        files_match = filecmp.cmp(output.GetOutputFilename(),
                                  output.GetOutputFilename() + '.tmp')
        write_file = not files_match
      if write_file:
        shutil.copy2(output.GetOutputFilename() + '.tmp',
                     output.GetOutputFilename())
      os.remove(output.GetOutputFilename() + '.tmp')

    self.VerboseOut(' done.\n')

  # Print warnings if there are any duplicate shortcuts.
  warnings = shortcuts.GenerateDuplicateShortcutsWarnings(
      self.res.UberClique(), self.res.GetTcProject())
  if warnings:
    print '\n'.join(warnings)

  # Print out any fallback warnings, and missing translation errors, and
  # exit with an error code if there are missing translations in a non-pseudo
  # and non-official build.
  warnings = (self.res.UberClique().MissingTranslationsReport().
              encode('ascii', 'replace'))
  if warnings:
    self.VerboseOut(warnings)
  if self.res.UberClique().HasMissingTranslations():
    print self.res.UberClique().missing_translations_
    sys.exit(-1)
def WriteFile(self, filename, contents, encoding='cp1252'):
  '''Writes contents to filename, encoded with the given encoding.

  Args:
    filename: path of the file to (over)write.
    contents: text to write.
    encoding: codec name for the output stream (default 'cp1252').
  '''
  # 'with open(...)' replaces the removed-in-Python-3 file() builtin and
  # guarantees the file is closed even if write() raises; this also matches
  # the newer WriteFile variant in this codebase.
  with open(filename, 'wb') as f:
    writer = util.WrapOutputStream(f, encoding)
    writer.write(contents)
def Process(self):
  '''Generates every output file declared by the loaded .grd resource tree.

  Chooses a per-output-type encoding, formats each output to a temp file,
  then moves/copies the temp file over the real output.
  '''
  # Update filenames with those provided by SCons if we're being invoked
  # from SCons.  The list of SCons targets also includes all <structure>
  # node outputs, but it starts with our output files, in the order they
  # occur in the .grd
  if self.scons_targets:
    assert len(self.scons_targets) >= len(self.res.GetOutputFiles())
    outfiles = self.res.GetOutputFiles()
    for ix in range(len(outfiles)):
      outfiles[ix].output_filename = os.path.abspath(
          self.scons_targets[ix])
  else:
    for output in self.res.GetOutputFiles():
      output.output_filename = os.path.abspath(os.path.join(
          self.output_directory, output.GetFilename()))

  # If there are whitelisted names, tag the tree once up front, this way
  # while looping through the actual output, it is just an attribute check.
  if self.whitelist_names:
    self.AddWhitelistTags(self.res, self.whitelist_names)

  for output in self.res.GetOutputFiles():
    self.VerboseOut('Creating %s...' % output.GetFilename())

    # Microsoft's RC compiler can only deal with single-byte or double-byte
    # files (no UTF-8), so we make all RC files UTF-16 to support all
    # character sets.
    if output.GetType() in ('rc_header', 'resource_map_header',
                            'resource_map_source',
                            'resource_file_map_source'):
      encoding = 'cp1252'
    elif output.GetType() in ('android', 'c_format', 'js_map_format',
                              'plist', 'plist_strings', 'doc', 'json'):
      encoding = 'utf_8'
    elif output.GetType() in ('chrome_messages_json'):
      # NOTE(review): ('chrome_messages_json') is a string, not a 1-tuple,
      # so this `in` is a substring test; correct for this exact value but
      # fragile — a trailing comma would make it a proper tuple.
      # Chrome Web Store currently expects BOM for UTF-8 files :-(
      encoding = 'utf-8-sig'
    else:
      # TODO(gfeher) modify here to set utf-8 encoding for admx/adml
      encoding = 'utf_16'

    # Set the context, for conditional inclusion of resources
    self.res.SetOutputLanguage(output.GetLanguage())
    self.res.SetOutputContext(output.GetContext())
    self.res.SetDefines(self.defines)

    # Make the output directory if it doesn't exist.
    self.MakeDirectoriesTo(output.GetOutputFilename())

    # Write the results to a temporary file and only overwrite the original
    # if the file changed.  This avoids unnecessary rebuilds.
    outfile = self.fo_create(output.GetOutputFilename() + '.tmp', 'wb')

    # data_package outputs are raw binary; all others get the encoding
    # wrapper chosen above.
    if output.GetType() != 'data_package':
      outfile = util.WrapOutputStream(outfile, encoding)

    # Iterate in-order through entire resource tree, calling formatters on
    # the entry into a node and on exit out of it.
    with outfile:
      self.ProcessNode(self.res, output, outfile)

    # Now copy from the temp file back to the real output, but on Windows,
    # only if the real output doesn't exist or the contents of the file
    # changed.  This prevents identical headers from being written and .cc
    # files from recompiling (which is painful on Windows).
    if not os.path.exists(output.GetOutputFilename()):
      os.rename(output.GetOutputFilename() + '.tmp',
                output.GetOutputFilename())
    else:
      # CHROMIUM SPECIFIC CHANGE.
      # This clashes with gyp + vstudio, which expect the output timestamp
      # to change on a rebuild, even if nothing has changed.
      #files_match = filecmp.cmp(output.GetOutputFilename(),
      #    output.GetOutputFilename() + '.tmp')
      #if (output.GetType() != 'rc_header' or not files_match
      #    or sys.platform != 'win32'):
      shutil.copy2(output.GetOutputFilename() + '.tmp',
                   output.GetOutputFilename())
      os.remove(output.GetOutputFilename() + '.tmp')

    self.VerboseOut(' done.\n')

  # Print warnings if there are any duplicate shortcuts.
  warnings = shortcuts.GenerateDuplicateShortcutsWarnings(
      self.res.UberClique(), self.res.GetTcProject())
  if warnings:
    print '\n'.join(warnings)

  # Print out any fallback warnings, and missing translation errors, and
  # exit with an error code if there are missing translations in a non-pseudo
  # and non-official build.
  warnings = (self.res.UberClique().MissingTranslationsReport().
              encode('ascii', 'replace'))
  if warnings:
    self.VerboseOut(warnings)
  if self.res.UberClique().HasMissingTranslations():
    print self.res.UberClique().missing_translations_
    sys.exit(-1)
def Process(self):
  '''Generates every output file declared by the loaded .grd resource tree.

  Formats each output to a temp file (optionally gzip-compressing it),
  then moves/copies the temp file over the real output only when needed.
  '''
  for output in self.res.GetOutputFiles():
    output.output_filename = os.path.abspath(
        os.path.join(self.output_directory, output.GetOutputFilename()))

  # If there are allowlisted names, tag the tree once up front, this way
  # while looping through the actual output, it is just an attribute check.
  if self.allowlist_names:
    self.AddAllowlistTags(self.res, self.allowlist_names)

  for output in self.res.GetOutputFiles():
    self.VerboseOut('Creating %s...' % output.GetOutputFilename())

    # Set the context, for conditional inclusion of resources
    self.res.SetOutputLanguage(output.GetLanguage())
    self.res.SetOutputContext(output.GetContext())
    self.res.SetFallbackToDefaultLayout(
        output.GetFallbackToDefaultLayout())
    self.res.SetDefines(self.defines)

    # Assign IDs only once to ensure that all outputs use the same IDs.
    if self.res.GetIdMap() is None:
      self.res.InitializeIds()

    # Make the output directory if it doesn't exist.
    self.MakeDirectoriesTo(output.GetOutputFilename())

    # Write the results to a temporary file and only overwrite the original
    # if the file changed.  This avoids unnecessary rebuilds.
    out_filename = output.GetOutputFilename()
    tmp_filename = out_filename + '.tmp'
    tmpfile = self.fo_create(tmp_filename, 'wb')
    output_type = output.GetType()
    # data_package outputs are raw binary; all others are text and need an
    # encoding wrapper.
    if output_type != 'data_package':
      encoding = self._EncodingForOutputType(output_type)
      tmpfile = util.WrapOutputStream(tmpfile, encoding)

    # Iterate in-order through entire resource tree, calling formatters on
    # the entry into a node and on exit out of it.
    with tmpfile:
      self.ProcessNode(self.res, output, tmpfile)

    if output_type == 'chrome_messages_json_gzip':
      # Gzip the formatted output; mtime=0 keeps the .gz bytes deterministic
      # across builds so the changed-file check below stays meaningful.
      gz_filename = tmp_filename + '.gz'
      with open(tmp_filename, 'rb') as tmpfile, \
          open(gz_filename, 'wb') as f:
        with gzip.GzipFile(filename='', mode='wb', fileobj=f,
                           mtime=0) as fgz:
          shutil.copyfileobj(tmpfile, fgz)
      os.remove(tmp_filename)
      tmp_filename = gz_filename

    # Now copy from the temp file back to the real output, but on Windows,
    # only if the real output doesn't exist or the contents of the file
    # changed.  This prevents identical headers from being written and .cc
    # files from recompiling (which is painful on Windows).
    if not os.path.exists(out_filename):
      os.rename(tmp_filename, out_filename)
    else:
      # CHROMIUM SPECIFIC CHANGE.
      # This clashes with gyp + vstudio, which expect the output timestamp
      # to change on a rebuild, even if nothing has changed, so only do
      # it when opted in.
      if not self.write_only_new:
        write_file = True
      else:
        files_match = filecmp.cmp(out_filename, tmp_filename)
        write_file = not files_match
      if write_file:
        shutil.copy2(tmp_filename, out_filename)
      os.remove(tmp_filename)

    self.VerboseOut(' done.\n')

  # Print warnings if there are any duplicate shortcuts.
  warnings = shortcuts.GenerateDuplicateShortcutsWarnings(
      self.res.UberClique(), self.res.GetTcProject())
  if warnings:
    print('\n'.join(warnings))

  # Print out any fallback warnings, and missing translation errors, and
  # exit with an error code if there are missing translations in a non-pseudo
  # and non-official build.
  warnings = self.res.UberClique().MissingTranslationsReport()
  if warnings:
    self.VerboseOut(warnings)
  if self.res.UberClique().HasMissingTranslations():
    print(self.res.UberClique().missing_translations_)
    sys.exit(-1)
def WriteFile(self, filename, contents, encoding='cp1252'):
  '''Writes contents to filename, encoded with the given encoding.

  Args:
    filename: path of the file to (over)write.
    contents: text to write.
    encoding: codec name for the output stream (default 'cp1252').
  '''
  # The with-block closes the underlying binary file even if write() raises.
  out = open(filename, 'wb')
  with out:
    util.WrapOutputStream(out, encoding).write(contents)
def Process(self):
  '''Generates every output file declared by the loaded .grd resource tree.

  Oldest variant: no whitelist support, explicit outfile.close() instead of
  a with-block, and rc_header ID state reset per output.
  '''
  # Update filenames with those provided by SCons if we're being invoked
  # from SCons.  The list of SCons targets also includes all <structure>
  # node outputs, but it starts with our output files, in the order they
  # occur in the .grd
  if self.scons_targets:
    assert len(self.scons_targets) >= len(self.res.GetOutputFiles())
    outfiles = self.res.GetOutputFiles()
    for ix in range(len(outfiles)):
      outfiles[ix].output_filename = os.path.abspath(
          self.scons_targets[ix])
  else:
    for output in self.res.GetOutputFiles():
      output.output_filename = os.path.abspath(os.path.join(
          self.output_directory, output.GetFilename()))

  for output in self.res.GetOutputFiles():
    self.VerboseOut('Creating %s...' % output.GetFilename())

    # Microsoft's RC compiler can only deal with single-byte or double-byte
    # files (no UTF-8), so we make all RC files UTF-16 to support all
    # character sets.
    if output.GetType() in ('rc_header', 'resource_map_header',
                            'resource_map_source'):
      encoding = 'cp1252'
    elif output.GetType() == 'js_map_format':
      encoding = 'utf_8'
    else:
      encoding = 'utf_16'

    # Make the output directory if it doesn't exist.
    outdir = os.path.split(output.GetOutputFilename())[0]
    if not os.path.exists(outdir):
      os.makedirs(outdir)

    # Write the results to a temporary file and only overwrite the original
    # if the file changed.  This avoids unnecessary rebuilds.
    outfile = self.fo_create(output.GetOutputFilename() + '.tmp', 'wb')
    # data_package outputs are raw binary; all others get the encoding
    # wrapper chosen above.
    if output.GetType() != 'data_package':
      outfile = util.WrapOutputStream(outfile, encoding)

    # Set the context, for conditional inclusion of resources
    self.res.SetOutputContext(output.GetLanguage(), self.defines)

    # TODO(joi) Handle this more gracefully
    # Resets the rc_header formatter's global ID cache for each output.
    import grit.format.rc_header
    grit.format.rc_header.Item.ids_ = {}

    # Iterate in-order through entire resource tree, calling formatters on
    # the entry into a node and on exit out of it.
    self.ProcessNode(self.res, output, outfile)
    # NOTE(review): outfile is not closed if ProcessNode raises — the newer
    # variants of this method use a with-block instead.
    outfile.close()

    # Now copy from the temp file back to the real output, but on Windows,
    # only if the real output doesn't exist or the contents of the file
    # changed.  This prevents identical headers from being written and .cc
    # files from recompiling (which is painful on Windows).
    if not os.path.exists(output.GetOutputFilename()):
      os.rename(output.GetOutputFilename() + '.tmp',
                output.GetOutputFilename())
    else:
      # CHROMIUM SPECIFIC CHANGE.
      # This clashes with gyp + vstudio, which expect the output timestamp
      # to change on a rebuild, even if nothing has changed.
      #files_match = filecmp.cmp(output.GetOutputFilename(),
      #    output.GetOutputFilename() + '.tmp')
      #if (output.GetType() != 'rc_header' or not files_match
      #    or sys.platform != 'win32'):
      shutil.copy2(output.GetOutputFilename() + '.tmp',
                   output.GetOutputFilename())
      os.remove(output.GetOutputFilename() + '.tmp')

    self.VerboseOut(' done.\n')

  # Print warnings if there are any duplicate shortcuts.
  warnings = shortcuts.GenerateDuplicateShortcutsWarnings(
      self.res.UberClique(), self.res.GetTcProject())
  if warnings:
    print '\n'.join(warnings)

  # Print out any fallback warnings, and missing translation errors, and
  # exit with an error code if there are missing translations in a non-pseudo
  # build
  warnings = (self.res.UberClique().MissingTranslationsReport().
              encode('ascii', 'replace'))
  if warnings:
    print warnings
  if self.res.UberClique().HasMissingTranslations():
    sys.exit(-1)