def testMissing(self):
    """A message missing from the FTL localization is reported as
    missing; no merge file is produced."""
    self.reference("""\
foo = fooVal
bar = barVal
eff = effVal
""")
    self.localized("""\
foo = lFoo
eff = lEff
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {
                'l10n.ftl': [{
                    'missingEntity': u'bar'
                }],
            },
            'summary': {
                None: {
                    'changed': 2,
                    'changed_w': 2,
                    'missing': 1,
                    'missing_w': 1
                }
            }
        })
    # validate merge results; assertTrue replaces the deprecated
    # unittest alias assert_ (removed in Python 3.12)
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    self.assertTrue(not os.path.exists(mergepath))
def main(argv):
    """Merge a reference (en-US) file and a locale file into --output.

    Runs compare-locales' ContentComparer, which writes a merged file
    to --output when the locale file needs fixing.  If no merge output
    was produced, fall back to copying the locale file, or the
    reference file when the locale file does not exist.

    Returns 0 on success.
    """
    parser = argparse.ArgumentParser(description="Merge l10n files.")
    parser.add_argument("--output", help="Path to write merged output")
    parser.add_argument("--ref-file", help="Path to reference file (en-US)")
    parser.add_argument("--l10n-file", help="Path to locale file")
    args = parser.parse_args(argv)

    from compare_locales.compare import (
        ContentComparer,
        Observer,
    )
    from compare_locales.paths import File

    # Fix: ensure the output directory exists *before* the compare,
    # which may itself write the merge output; previously this only
    # happened afterwards, just in time for the copy fallback.
    ensureParentDir(args.output)

    cc = ContentComparer([Observer()])
    cc.compare(
        File(args.ref_file, args.ref_file, ""),
        File(args.l10n_file, args.l10n_file, ""),
        args.output,
    )
    if not os.path.exists(args.output):
        # No merge was needed/produced; fall back to the raw l10n
        # file, or to the reference if the l10n file is absent.
        src = args.l10n_file
        if not os.path.exists(args.l10n_file):
            src = args.ref_file
        shutil.copy(src, args.output)
    return 0
def testGood(self):
    """Fully-translated properties file: all entries counted as
    changed, no merge file written."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal word
bar = barVal word
eff = effVal""")
    self.localized("""foo = lFoo
bar = lBar
eff = lEff word
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.properties", ""),
               File(self.l10n, "l10n.properties", ""),
               mozpath.join(self.tmp, "merge", "l10n.properties"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {'summary':
            {None: {
                'changed': 3,
                'changed_w': 5
            }},
         'details': {}
         }
    )
    # assertTrue replaces the deprecated assert_ alias
    self.assertTrue(
        not os.path.exists(mozpath.join(self.tmp, "merge",
                                        'l10n.properties')))
def testGood(self):
    """Fully-translated DTD file: all entities counted as changed,
    no merge file written."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""<!ENTITY foo 'fooVal'>
<!ENTITY bar 'barVal'>
<!ENTITY eff 'effVal'>""")
    self.localized("""<!ENTITY foo 'lFoo'>
<!ENTITY bar 'lBar'>
<!ENTITY eff 'lEff'>
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.dtd", ""),
               File(self.l10n, "l10n.dtd", ""),
               mozpath.join(self.tmp, "merge", "l10n.dtd"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {'summary':
            {None: {
                'changed': 3,
                'changed_w': 3
            }},
         'details': {}
         }
    )
    # assertTrue replaces the deprecated assert_ alias
    self.assertTrue(
        not os.path.exists(mozpath.join(self.tmp, "merge", 'l10n.dtd')))
def testMatchingReferences(self):
    """A localized message using the same message reference as the
    reference file compares cleanly; no merge file is written."""
    self.reference("""\
foo = Reference { bar }
""")
    self.localized("""\
foo = Localized { bar }
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {},
            'summary': {
                None: {
                    'changed': 1,
                    'changed_w': 1
                }
            }
        })
    # validate merge results; assertTrue replaces the deprecated
    # assert_ alias
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    self.assertTrue(not os.path.exists(mergepath))
def testGood(self):
    """Fully-translated FTL file including a term (-eff): everything
    counted as changed, no merge file written."""
    self.reference("""\
foo = fooVal
bar = barVal
-eff = effVal
""")
    self.localized("""\
foo = lFoo
bar = lBar
-eff = lEff
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'summary': {
                None: {
                    'changed': 3,
                    'changed_w': 3
                }
            },
            'details': {}
        })
    # validate merge results; assertTrue replaces the deprecated
    # assert_ alias
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    self.assertTrue(not os.path.exists(mergepath))
def test_duplicate_attributes(self):
    """Repeating the same attribute three times in a localized
    message yields one warning and no merge output."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
    .attr = good""")
    self.localized("""foo = localized
    .attr = not
    .attr = so
    .attr = good
""")
    merge_target = mozpath.join(self.tmp, "merge", "l10n.ftl")
    comparer = ContentComparer([Observer()])
    comparer.compare(File(self.ref, "en-reference.ftl", ""),
                     File(self.l10n, "l10n.ftl", ""),
                     merge_target)
    expected = {
        'summary': {
            None: {
                'warnings': 1,
                'changed': 1,
                'changed_w': 2
            }
        },
        'details': {
            'l10n.ftl': [{
                'warning': u'Attribute "attr" occurs 3 times '
                           u'at line 4, column 5 for foo'
            }]
        }
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
    self.assertFalse(os.path.isfile(merge_target))
def testObsoleteStandaloneComment(self):
    """A standalone comment present only in the localization is not
    flagged; comparison is clean and no merge file is written."""
    self.reference("""\
foo = fooVal
bar = barVal
""")
    self.localized("""\
foo = lFoo
// Standalone Comment
bar = lBar
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {},
            'summary': {
                None: {
                    'changed': 2,
                    'changed_w': 2,
                }
            }
        })
    # validate merge results; assertTrue replaces the deprecated
    # assert_ alias
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    self.assertTrue(not os.path.exists(mergepath))
def test_duplicate(self):
    """A duplicate in the reference is a warning, a duplicate in the
    localization is an error; no merge output is produced."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
bar = barVal
eff = effVal
foo = other val for foo""")
    self.localized("""foo = localized
bar = lBar
eff = localized eff
bar = duplicated bar
""")
    merge_target = mozpath.join(self.tmp, "merge", "l10n.ftl")
    comparer = ContentComparer([Observer()])
    comparer.compare(File(self.ref, "en-reference.ftl", ""),
                     File(self.l10n, "l10n.ftl", ""),
                     merge_target)
    expected = {
        'summary': {
            None: {
                'errors': 1,
                'warnings': 1,
                'changed': 3,
                'changed_w': 6
            }
        },
        'details': {
            'l10n.ftl': [
                {'warning': u'foo occurs 2 times'},
                {'error': u'bar occurs 2 times'},
            ]
        }
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
    self.assertFalse(os.path.isfile(merge_target))
def test_reference_xml_error(self):
    """An unparsable en-US value ('bad &val') produces a warning
    against the reference, while the comparison itself proceeds."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""<!ENTITY foo 'fooVal'>
<!ENTITY bar 'bad &val'>
<!ENTITY eff 'effVal'>""")
    self.localized("""<!ENTITY foo 'fooVal'>
<!ENTITY bar 'good val'>
<!ENTITY eff 'effVal'>
""")
    comparer = ContentComparer([Observer()])
    comparer.compare(File(self.ref, "en-reference.dtd", ""),
                     File(self.l10n, "l10n.dtd", ""),
                     mozpath.join(self.tmp, "merge", "l10n.dtd"))
    expected = {
        'summary': {
            None: {
                'warnings': 1,
                'unchanged': 2,
                'unchanged_w': 2,
                'changed': 1,
                'changed_w': 2
            }
        },
        'details': {
            'l10n.dtd': [{
                'warning': u"can't parse en-US value at line 1, "
                           u"column 0 for bar"
            }]
        }
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
def testMissing(self):
    """Missing properties entries are reported and merged back in;
    the merge file carries the l10n entry plus the reference ones."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
bar = barVal
eff = effVal""")
    self.localized("""bar = lBar
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.properties", ""),
               File(self.l10n, "l10n.properties", ""),
               mozpath.join(self.tmp, "merge", "l10n.properties"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'summary': {
                None: {
                    'changed': 1,
                    'changed_w': 1,
                    'missing': 2,
                    'missing_w': 2
                }
            },
            'details': {
                'l10n.properties': [{
                    'missingEntity': u'foo'
                }, {
                    'missingEntity': u'eff'
                }]
            }
        })
    mergefile = mozpath.join(self.tmp, "merge", "l10n.properties")
    self.assertTrue(os.path.isfile(mergefile))
    p = getParser(mergefile)
    p.readFile(mergefile)
    [m, n] = p.parse()
    # list comprehension instead of map(): on Python 3, map() returns
    # an iterator, which never compares equal to a list
    self.assertEqual([e.key for e in m], ["bar", "foo", "eff"])
def testObsolete(self):
    """An entry only present in the localization is reported as
    obsolete alongside the changed/unchanged counts."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
eff = effVal""")
    self.localized("""foo = fooVal
other = obsolete
eff = leffVal
""")
    comparer = ContentComparer([Observer()])
    comparer.compare(File(self.ref, "en-reference.properties", ""),
                     File(self.l10n, "l10n.properties", ""),
                     mozpath.join(self.tmp, "merge", "l10n.properties"))
    expected = {
        'summary': {
            None: {
                'changed': 1,
                'changed_w': 1,
                'obsolete': 1,
                'unchanged': 1,
                'unchanged_w': 1
            }
        },
        'details': {
            'l10n.properties': [{
                'obsoleteEntity': u'other'
            }]
        }
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
def testMismatchingAttributes(self):
    """Obsolete and missing attributes are errors; only the clean
    message (eff) survives into the merge file, taken from l10n."""
    self.reference("""
foo = Foo
bar = Bar
    .tender = Attribute value
eff = Eff
""")
    self.localized("""\
foo = lFoo
  .obsolete = attr
bar = lBar
eff = lEff
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {
                'l10n.ftl': [
                    {
                        'error': u'Obsolete attribute: '
                                 'obsolete at line 2, column 3 for foo'
                    },
                    {
                        'error': u'Missing attribute: tender at line 3,'
                                 ' column 1 for bar',
                    },
                ],
            },
            'summary': {
                None: {
                    'changed': 3,
                    'changed_w': 5,
                    'errors': 2
                }
            }
        })
    # validate merge results; assertTrue replaces the deprecated
    # assert_ alias
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    self.assertTrue(os.path.exists(mergepath))
    p = getParser(mergepath)
    p.readFile(mergepath)
    merged_entities, merged_map = p.parse()
    self.assertEqual([e.key for e in merged_entities], ["eff"])
    merged_eff = merged_entities[merged_map['eff']]
    # eff should be l10n
    p.readFile(self.l10n)
    l10n_entities, l10n_map = p.parse()
    l10n_eff = l10n_entities[l10n_map['eff']]
    self.assertTrue(merged_eff.equals(l10n_eff))
def testMismatchingReferences(self):
    """Message references that differ from the reference file are
    reported as warnings (missing/obsolete), not errors; no merge
    file is written."""
    self.reference("""\
foo = Reference { bar }
bar = Reference { baz }
baz = Reference
""")
    self.localized("""\
foo = Localized { qux }
bar = Localized
baz = Localized { qux }
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {
                'l10n.ftl': [
                    {
                        'warning': u'Missing message reference: bar '
                                   u'at line 1, column 1 for foo'
                    },
                    {
                        'warning': u'Obsolete message reference: qux '
                                   u'at line 1, column 19 for foo'
                    },
                    {
                        'warning': u'Missing message reference: baz '
                                   u'at line 2, column 1 for bar'
                    },
                    {
                        'warning': u'Obsolete message reference: qux '
                                   u'at line 3, column 19 for baz'
                    },
                ],
            },
            'summary': {
                None: {
                    'changed': 3,
                    'changed_w': 3,
                    'warnings': 4
                }
            }
        })
    # validate merge results; assertTrue replaces the deprecated
    # assert_ alias
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    self.assertTrue(not os.path.exists(mergepath))
def testMismatchingValues(self):
    """A missing value and an obsolete value are both errors; the
    merge file ends up with no usable entities."""
    self.reference("""
foo = Foo
    .foottr = something
bar
    .tender = Attribute value
""")
    self.localized("""\
foo
    .foottr = attr
bar = lBar
    .tender = localized
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {
                'l10n.ftl': [
                    {
                        'error': u'Missing value at line 1, column 1 for foo'
                    },
                    {
                        'error': u'Obsolete value at line 3, column 7 for bar',
                    },
                ]
            },
            'summary': {
                None: {
                    'changed': 2,
                    'changed_w': 4,
                    'errors': 2
                }
            }
        })
    # validate merge results; assertTrue replaces the deprecated
    # assert_ alias
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    self.assertTrue(os.path.exists(mergepath))
    p = getParser(mergepath)
    p.readFile(mergepath)
    merged_entities, _ = p.parse()
    self.assertEqual([e.key for e in merged_entities], [])
def test_term_attributes(self):
    """Terms may have locale-specific attributes: attribute
    differences on terms are not flagged, only a fully missing term
    is; no merge file is written."""
    self.reference("""
-foo = Foo
-bar = Bar
-baz = Baz
    .attr = Baz Attribute
-qux = Qux
    .attr = Qux Attribute
-missing = Missing
    .attr = An Attribute
""")
    self.localized("""\
-foo = Localized Foo
-bar = Localized Bar
    .attr = Locale-specific Bar Attribute
-baz = Localized Baz
-qux = Localized Qux
    .other = Locale-specific Qux Attribute
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {
                'l10n.ftl': [
                    {
                        'missingEntity': u'-missing'
                    },
                ],
            },
            'summary': {
                None: {
                    'changed': 4,
                    'changed_w': 4,
                    'missing': 1,
                    'missing_w': 1,
                }
            }
        })
    # validate merge results; assertTrue replaces the deprecated
    # assert_ alias
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    self.assertTrue(not os.path.exists(mergepath))
def testJunk(self):
    """Unparsable DTD content is an error and the affected entity is
    reported missing; the merge file appends the fixed entity."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""<!ENTITY foo 'fooVal'>
<!ENTITY bar 'barVal'>
<!ENTITY eff 'effVal'>""")
    self.localized("""<!ENTITY foo 'fooVal'>
<!ENTY bar 'gimmick'>
<!ENTITY eff 'effVal'>
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.dtd", ""),
               File(self.l10n, "l10n.dtd", ""),
               mozpath.join(self.tmp, "merge", "l10n.dtd"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'summary': {
                None: {
                    'errors': 1,
                    'missing': 1,
                    'missing_w': 1,
                    'unchanged': 2,
                    'unchanged_w': 2
                }
            },
            'details': {
                'l10n.dtd': [{
                    'error': u'Unparsed content "<!ENTY bar '
                             u'\'gimmick\'>" '
                             u'from line 2 column 1 to '
                             u'line 2 column 22'
                }, {
                    'missingEntity': u'bar'
                }]
            }
        })
    mergefile = mozpath.join(self.tmp, "merge", "l10n.dtd")
    self.assertTrue(os.path.isfile(mergefile))
    p = getParser(mergefile)
    p.readFile(mergefile)
    [m, n] = p.parse()
    # list comprehension instead of map(): on Python 3, map() returns
    # an iterator, which never compares equal to a list
    self.assertEqual([e.key for e in m], ["foo", "eff", "bar"])
def testError(self):
    """A printf-argument mismatch is an error; the merge file keeps
    the good l10n entry and restores foo and bar from the reference."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
bar = %d barVal
eff = effVal""")
    self.localized("""\
bar = %S lBar
eff = leffVal
""")
    merge_target = mozpath.join(self.tmp, "merge", "l10n.properties")
    comparer = ContentComparer([Observer()])
    comparer.compare(File(self.ref, "en-reference.properties", ""),
                     File(self.l10n, "l10n.properties", ""),
                     merge_target)
    expected = {
        'summary': {
            None: {
                'changed': 2,
                'changed_w': 3,
                'errors': 1,
                'missing': 1,
                'missing_w': 1
            }
        },
        'details': {
            'l10n.properties': [{
                'missingEntity': u'foo'
            }, {
                'error': u'argument 1 `S` should be `d` '
                         u'at line 1, column 7 for bar'
            }]
        }
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
    self.assertTrue(os.path.isfile(merge_target))
    parser = getParser(merge_target)
    parser.readFile(merge_target)
    [entities, key_map] = parser.parse()
    self.assertEqual([entity.key for entity in entities],
                     ["eff", "foo", "bar"])
    # bar must come from the reference, since the l10n value errored
    self.assertEqual(entities[key_map['bar']].val, '%d barVal')
def test_matching_tags(self):
    """Identical reference and localization compare as unchanged;
    nothing is merged."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
    #yes
""")
    self.localized("""foo = fooVal
    #yes
""")
    merge_target = mozpath.join(self.tmp, "merge", "l10n.ftl")
    comparer = ContentComparer([Observer()])
    comparer.compare(File(self.ref, "en-reference.ftl", ""),
                     File(self.l10n, "l10n.ftl", ""),
                     merge_target)
    expected = {
        'summary': {
            None: {
                'unchanged': 1,
                'unchanged_w': 1
            }
        },
        'details': {}
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
    self.assertFalse(os.path.isfile(merge_target))
def handle(self, config_paths, l10n_base_dir, locales, merge=None,
           defines=None, unified=False, full=False, quiet=0,
           clobber=False, data='text'):
    """Drive a compare-locales run over one or more project configs.

    Repairs the argparse nargs mess (configs, then base dir, then
    locales arrive interleaved), builds a ProjectConfig per config
    file, and runs compareProjects over them.

    Fix: the exception handler used Python-2-only syntax
    (`except ..., exc` and the `print` statement), which is a
    SyntaxError on Python 3 and inconsistent with the sibling
    handle() implementation in this file.
    """
    # using nargs multiple times in argparser totally screws things
    # up, repair that.
    # First files are configs, then the base dir, everything else is
    # locales
    all_args = config_paths + [l10n_base_dir] + locales
    config_paths = []
    locales = []
    if defines is None:
        defines = []
    while all_args and not os.path.isdir(all_args[0]):
        config_paths.append(all_args.pop(0))
    if not config_paths:
        self.parser.error('no configuration file given')
    for cf in config_paths:
        if not os.path.isfile(cf):
            self.parser.error('config file %s not found' % cf)
    if not all_args:
        self.parser.error('l10n-base-dir not found')
    l10n_base_dir = all_args.pop(0)
    locales.extend(all_args)
    # when we compare disabled projects, we set our locales
    # on all subconfigs, so deep is True.
    locales_deep = full
    configs = []
    config_env = {}
    for define in defines:
        var, _, value = define.partition('=')
        config_env[var] = value
    for config_path in config_paths:
        if config_path.endswith('.toml'):
            try:
                config = TOMLParser.parse(config_path, env=config_env)
            except ConfigNotFound as e:
                self.parser.exit('config file %s not found' % e.filename)
            config.add_global_environment(l10n_base=l10n_base_dir)
            if locales:
                config.set_locales(locales, deep=locales_deep)
            configs.append(config)
        else:
            # legacy ini-style config
            app = EnumerateApp(config_path, l10n_base_dir, locales)
            configs.append(app.asConfig())
    try:
        unified_observer = None
        if unified:
            unified_observer = Observer(quiet=quiet)
        observers = compareProjects(configs,
                                    quiet=quiet,
                                    stat_observer=unified_observer,
                                    merge_stage=merge,
                                    clobber_merge=clobber)
    except (OSError, IOError) as exc:
        print("FAIL: " + str(exc))
        self.parser.exit(2)
def handle(self, config_paths, l10n_base_dir, locales, merge=None,
           defines=None, unified=False, full=False, quiet=0,
           validate=False, clobber=False, data='text'):
    """Drive a compare-locales run and return the process exit code.

    Repairs the interleaved argparse nargs arguments, builds a
    ProjectConfig per config file, runs compareProjects, prints each
    observer's serialized report, and returns 1 if any locale summary
    recorded errors, 0 otherwise.
    """
    # using nargs multiple times in argparser totally screws things
    # up, repair that.
    # First files are configs, then the base dir, everything else is
    # locales
    all_args = config_paths + [l10n_base_dir] + locales
    config_paths = []
    locales = []
    if defines is None:
        defines = []
    # everything before the first existing directory is a config file
    while all_args and not os.path.isdir(all_args[0]):
        config_paths.append(all_args.pop(0))
    if not config_paths:
        self.parser.error('no configuration file given')
    for cf in config_paths:
        if not os.path.isfile(cf):
            self.parser.error('config file %s not found' % cf)
    if not all_args:
        self.parser.error('l10n-base-dir not found')
    l10n_base_dir = all_args.pop(0)
    if validate:
        # signal validation mode by setting locale list to [None]
        locales = [None]
    else:
        locales.extend(all_args)
    # when we compare disabled projects, we set our locales
    # on all subconfigs, so deep is True.
    locales_deep = full
    configs = []
    config_env = {}
    # --define VAR=VALUE pairs become the config environment
    for define in defines:
        var, _, value = define.partition('=')
        config_env[var] = value
    for config_path in config_paths:
        if config_path.endswith('.toml'):
            try:
                config = TOMLParser.parse(config_path, env=config_env)
            except ConfigNotFound as e:
                self.parser.exit('config file %s not found' % e.filename)
            config.add_global_environment(l10n_base=l10n_base_dir)
            if locales:
                config.set_locales(locales, deep=locales_deep)
            configs.append(config)
        else:
            # legacy ini-style config goes through EnumerateApp
            app = EnumerateApp(config_path, l10n_base_dir, locales)
            configs.append(app.asConfig())
    try:
        unified_observer = None
        if unified:
            unified_observer = Observer(quiet=quiet)
        observers = compareProjects(configs,
                                    quiet=quiet,
                                    stat_observer=unified_observer,
                                    merge_stage=merge,
                                    clobber_merge=clobber)
    except (OSError, IOError) as exc:
        print("FAIL: " + str(exc))
        self.parser.exit(2)
    if unified:
        # with --unified, only the aggregating observer is reported
        observers = [unified_observer]
    rv = 0
    for observer in observers:
        print(observer.serialize(type=data))
        # summary is a dict of lang-summary dicts
        # find out if any of our results has errors, return 1 if so
        if rv > 0:
            continue  # we already have errors
        for loc, summary in observer.summary.items():
            if summary.get('errors', 0) > 0:
                rv = 1
                # no need to check further summaries, but
                # continue to run through observers
                break
    return rv
def testBroken(self):
    """Unparsed FTL content is reported per-span as errors, the
    affected entities as missing; the merge keeps only the one good
    l10n message (foo)."""
    self.reference("""\
foo = fooVal
bar = barVal
eff = effVal
""")
    self.localized("""\
-- Invalid Comment
foo = lFoo
bar lBar
eff = lEff {
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {
                'l10n.ftl': [
                    {
                        'error': u'Unparsed content "-- Invalid Comment" '
                                 u'from line 1 column 1 '
                                 u'to line 1 column 19'
                    },
                    {
                        'error': u'Unparsed content "bar lBar" '
                                 u'from line 3 column 1 '
                                 u'to line 3 column 9'
                    },
                    {
                        'error': u'Unparsed content "eff = lEff {" '
                                 u'from line 4 column 1 '
                                 u'to line 4 column 13'
                    },
                    {
                        'missingEntity': u'bar'
                    },
                    {
                        'missingEntity': u'eff'
                    },
                ],
            },
            'summary': {
                None: {
                    'changed': 1,
                    'changed_w': 1,
                    'missing': 2,
                    'missing_w': 2,
                    'errors': 3
                }
            }
        })
    # validate merge results; assertTrue replaces the deprecated
    # assert_ alias
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    self.assertTrue(os.path.exists(mergepath))
    p = getParser(mergepath)
    p.readFile(mergepath)
    merged_entities, merged_map = p.parse()
    self.assertEqual([e.key for e in merged_entities], ["foo"])
    merged_foo = merged_entities[merged_map['foo']]
    # foo should be l10n
    p.readFile(self.l10n)
    l10n_entities, l10n_map = p.parse()
    l10n_foo = l10n_entities[l10n_map['foo']]
    self.assertTrue(merged_foo.equals(l10n_foo))