def test_duplicate(self):
    """A duplicated reference message is a warning, a duplicated
    localized message an error; no merge artifact is produced."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
bar = barVal
eff = effVal
foo = other val for foo""")
    self.localized("""foo = localized
bar = lBar
eff = localized eff
bar = duplicated bar
""")
    comparer = ContentComparer([Observer()])
    merge_path = mozpath.join(self.tmp, "merge", "l10n.ftl")
    comparer.compare(File(self.ref, "en-reference.ftl", ""),
                     File(self.l10n, "l10n.ftl", ""),
                     merge_path)
    expected = {
        'summary': {
            None: {
                'errors': 1,
                'warnings': 1,
                'changed': 3,
                'changed_w': 6
            }
        },
        'details': {
            'l10n.ftl': [
                {'warning': u'foo occurs 2 times'},
                {'error': u'bar occurs 2 times'},
            ]
        }
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
    # Duplicates alone do not trigger writing a merge file.
    self.assertFalse(os.path.isfile(merge_path))
def testGood(self):
    """A fully translated FTL file: three changed messages, empty
    details, and no merge output is written."""
    self.reference("""\
foo = fooVal
bar = barVal
-eff = effVal
""")
    self.localized("""\
foo = lFoo
bar = lBar
-eff = lEff
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(cc.observers[0].toJSON(), {
        'summary': {
            None: {
                'changed': 3,
                'changed_w': 3
            }
        },
        'details': {}
    })
    # validate merge results
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    # assert_ is a deprecated unittest alias; assertFalse is explicit.
    self.assertFalse(os.path.exists(mergepath))
def test_duplicate(self):
    """Duplicated messages: reference duplicate warns, localized
    duplicate errors, and no merge file appears."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
bar = barVal
eff = effVal
foo = other val for foo""")
    self.localized("""foo = localized
bar = lBar
eff = localized eff
bar = duplicated bar
""")
    comparer = ContentComparer([Observer()])
    merge_path = mozpath.join(self.tmp, "merge", "l10n.ftl")
    comparer.compare(File(self.ref, "en-reference.ftl", ""),
                     File(self.l10n, "l10n.ftl", ""),
                     merge_path)
    expected = {
        'summary': {
            None: {
                'errors': 1,
                'warnings': 1,
                'changed': 3,
                'changed_w': 6
            }
        },
        'details': {
            'l10n.ftl': [
                {'warning': u'foo occurs 2 times'},
                {'error': u'bar occurs 2 times'},
            ]
        }
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
    self.assertFalse(os.path.isfile(merge_path))
def testObsoleteStandaloneComment(self):
    """A standalone comment only present in the localization is not an
    issue; both messages count as changed and nothing is merged."""
    self.reference("""\
foo = fooVal
bar = barVal
""")
    self.localized("""\
foo = lFoo
// Standalone Comment
bar = lBar
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(cc.observers[0].toJSON(), {
        'details': {},
        'summary': {
            None: {
                'changed': 2,
                'changed_w': 2,
            }
        }
    })
    # validate merge results
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    # assert_ is a deprecated unittest alias; assertFalse is explicit.
    self.assertFalse(os.path.exists(mergepath))
def test_duplicate_attributes(self):
    """Repeated attributes on one localized message are a warning;
    the message still counts as changed and nothing is merged."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
    .attr = good""")
    self.localized("""foo = localized
    .attr = not
    .attr = so
    .attr = good
""")
    comparer = ContentComparer([Observer()])
    merge_path = mozpath.join(self.tmp, "merge", "l10n.ftl")
    comparer.compare(File(self.ref, "en-reference.ftl", ""),
                     File(self.l10n, "l10n.ftl", ""),
                     merge_path)
    expected = {
        'summary': {
            None: {
                'warnings': 1,
                'changed': 1,
                'changed_w': 2
            }
        },
        'details': {
            'l10n.ftl': [{
                'warning': u'Attribute "attr" occurs 3 times '
                           u'at line 4, column 5 for foo'
            }]
        }
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
    self.assertFalse(os.path.isfile(merge_path))
def main(argv):
    """Compare a reference and a locale file, producing merged output.

    When the compare step writes no merge output, fall back to copying
    the locale file itself, or the reference file if the locale file
    does not exist. Returns 0.
    """
    parser = argparse.ArgumentParser(description="Merge l10n files.")
    parser.add_argument("--output", help="Path to write merged output")
    parser.add_argument("--ref-file", help="Path to reference file (en-US)")
    parser.add_argument("--l10n-file", help="Path to locale file")
    args = parser.parse_args(argv)

    # Deferred imports: only needed once arguments parse successfully.
    from compare_locales.compare import ContentComparer, Observer
    from compare_locales.paths import File

    comparer = ContentComparer([Observer()])
    comparer.compare(
        File(args.ref_file, args.ref_file, ""),
        File(args.l10n_file, args.l10n_file, ""),
        args.output,
    )
    ensureParentDir(args.output)
    if not os.path.exists(args.output):
        # No merge was needed; ship the locale file as-is, or the
        # reference when the locale file is missing entirely.
        fallback = args.l10n_file
        if not os.path.exists(args.l10n_file):
            fallback = args.ref_file
        shutil.copy(fallback, args.output)
    return 0
def testMissing(self):
    """Two missing entities are reported and end up in the merge file
    alongside the one translated entity."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
bar = barVal
eff = effVal""")
    self.localized("""bar = lBar
""")
    cc = ContentComparer()
    cc.set_merge_stage(os.path.join(self.tmp, "merge"))
    cc.compare(File(self.ref, "en-reference.properties", ""),
               File(self.l10n, "l10n.properties", ""))
    self.assertDictEqual(
        cc.observer.toJSON(),
        {'summary': {None: {
            'changed': 1,
            'missing': 2
        }},
         'details': {
             'children': [
                 ('l10n.properties',
                  {'value': {'missingEntity': [u'eff', u'foo']}}
                  )
             ]}
         }
    )
    mergefile = os.path.join(self.tmp, "merge", "l10n.properties")
    self.assertTrue(os.path.isfile(mergefile))
    p = getParser(mergefile)
    p.readFile(mergefile)
    [m, n] = p.parse()
    # List comprehension instead of map(): under Python 3 map() returns
    # an iterator, which never compares equal to a list.
    self.assertEqual([e.key for e in m], ["bar", "eff", "foo"])
def testMissing(self):
    """Missing entities are reported per entity and merged back in."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
bar = barVal
eff = effVal""")
    self.localized("""bar = lBar
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.properties", ""),
               File(self.l10n, "l10n.properties", ""),
               mozpath.join(self.tmp, "merge", "l10n.properties"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'summary': {
                None: {
                    'changed': 1,
                    'changed_w': 1,
                    'missing': 2,
                    'missing_w': 2
                }
            },
            'details': {
                'l10n.properties': [{
                    'missingEntity': u'foo'
                }, {
                    'missingEntity': u'eff'
                }]
            }
        })
    mergefile = mozpath.join(self.tmp, "merge", "l10n.properties")
    self.assertTrue(os.path.isfile(mergefile))
    p = getParser(mergefile)
    p.readFile(mergefile)
    [m, n] = p.parse()
    # List comprehension instead of map(): under Python 3 map() returns
    # an iterator, which never compares equal to a list.
    self.assertEqual([e.key for e in m], ["bar", "foo", "eff"])
def testMissing(self):
    """A missing FTL message is reported; no merge file is written."""
    self.reference("""\
foo = fooVal
bar = barVal
eff = effVal
""")
    self.localized("""\
foo = lFoo
eff = lEff
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {
                'l10n.ftl': [{
                    'missingEntity': u'bar'
                }],
            },
            'summary': {
                None: {
                    'changed': 2,
                    'changed_w': 2,
                    'missing': 1,
                    'missing_w': 1
                }
            }
        })
    # validate merge results
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    # assert_ is a deprecated unittest alias; assertFalse is explicit.
    self.assertFalse(os.path.exists(mergepath))
def test_duplicate_attributes(self):
    """A thrice-repeated attribute is flagged as a warning; the
    message counts as changed and no merge file is produced."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
    .attr = good""")
    self.localized("""foo = localized
    .attr = not
    .attr = so
    .attr = good
""")
    comparer = ContentComparer([Observer()])
    merge_path = mozpath.join(self.tmp, "merge", "l10n.ftl")
    comparer.compare(File(self.ref, "en-reference.ftl", ""),
                     File(self.l10n, "l10n.ftl", ""),
                     merge_path)
    expected = {
        'summary': {
            None: {
                'warnings': 1,
                'changed': 1,
                'changed_w': 2
            }
        },
        'details': {
            'l10n.ftl': [{
                'warning': u'Attribute "attr" occurs 3 times '
                           u'at line 4, column 5 for foo'
            }]
        }
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
    self.assertFalse(os.path.isfile(merge_path))
def testObsoleteStandaloneComment(self):
    """A localization-only standalone comment is fine: two changed
    messages, no details, no merge output."""
    self.reference("""\
foo = fooVal
bar = barVal
""")
    self.localized("""\
foo = lFoo
// Standalone Comment
bar = lBar
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {},
            'summary': {
                None: {
                    'changed': 2,
                    'changed_w': 2,
                }
            }
        }
    )
    # validate merge results
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    # assert_ is a deprecated unittest alias; assertFalse is explicit.
    self.assertFalse(os.path.exists(mergepath))
def testGood(self):
    """A fully translated properties file: word counts are tracked via
    changed_w and no merge output is produced."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal word
bar = barVal word
eff = effVal""")
    self.localized("""foo = lFoo
bar = lBar
eff = lEff word
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.properties", ""),
               File(self.l10n, "l10n.properties", ""),
               mozpath.join(self.tmp, "merge", "l10n.properties"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {'summary': {None: {
            'changed': 3,
            'changed_w': 5
        }},
         'details': {}
         }
    )
    # assert_ is a deprecated unittest alias; assertFalse is explicit.
    self.assertFalse(os.path.exists(
        mozpath.join(self.tmp, "merge", 'l10n.properties')))
def testMatchingReferences(self):
    """A message reference matching the en-US reference passes without
    warnings; no merge file is written."""
    self.reference("""\
foo = Reference { bar }
""")
    self.localized("""\
foo = Localized { bar }
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(cc.observers[0].toJSON(), {
        'details': {},
        'summary': {
            None: {
                'changed': 1,
                'changed_w': 1
            }
        }
    })
    # validate merge results
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    # assert_ is a deprecated unittest alias; assertFalse is explicit.
    self.assertFalse(os.path.exists(mergepath))
def testGood(self):
    """A fully localized DTD: three changed entities, no details, and
    no merge output."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""<!ENTITY foo 'fooVal'>
<!ENTITY bar 'barVal'>
<!ENTITY eff 'effVal'>""")
    self.localized("""<!ENTITY foo 'lFoo'>
<!ENTITY bar 'lBar'>
<!ENTITY eff 'lEff'>
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.dtd", ""),
               File(self.l10n, "l10n.dtd", ""),
               mozpath.join(self.tmp, "merge", "l10n.dtd"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {'summary': {None: {
            'changed': 3,
            'changed_w': 3
        }},
         'details': {}
         }
    )
    # assert_ is a deprecated unittest alias; assertFalse is explicit.
    self.assertFalse(
        os.path.exists(mozpath.join(self.tmp, "merge", 'l10n.dtd')))
def test_reference_xml_error(self):
    """An en-US value that fails XML parsing surfaces as a warning on
    the localized entity, which still counts as changed."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""<!ENTITY foo 'fooVal'>
<!ENTITY bar 'bad &val'>
<!ENTITY eff 'effVal'>""")
    self.localized("""<!ENTITY foo 'fooVal'>
<!ENTITY bar 'good val'>
<!ENTITY eff 'effVal'>
""")
    comparer = ContentComparer([Observer()])
    comparer.compare(File(self.ref, "en-reference.dtd", ""),
                     File(self.l10n, "l10n.dtd", ""),
                     mozpath.join(self.tmp, "merge", "l10n.dtd"))
    expected = {
        'summary': {
            None: {
                'warnings': 1,
                'unchanged': 2,
                'unchanged_w': 2,
                'changed': 1,
                'changed_w': 2
            }
        },
        'details': {
            'l10n.dtd': [{
                'warning': u"can't parse en-US value at line 1, "
                           u"column 0 for bar"
            }]
        }
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
def testObsolete(self):
    """An entity absent from the reference is reported obsolete."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
eff = effVal""")
    self.localized("""foo = fooVal
other = obsolete
eff = leffVal
""")
    comparer = ContentComparer([Observer()])
    comparer.compare(File(self.ref, "en-reference.properties", ""),
                     File(self.l10n, "l10n.properties", ""),
                     mozpath.join(self.tmp, "merge", "l10n.properties"))
    expected = {
        'summary': {
            None: {
                'changed': 1,
                'changed_w': 1,
                'obsolete': 1,
                'unchanged': 1,
                'unchanged_w': 1
            }
        },
        'details': {
            'l10n.properties': [{'obsoleteEntity': u'other'}]
        }
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
def testObsolete(self):
    """An entity the reference no longer has counts as obsolete; the
    others are one changed and one unchanged."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
eff = effVal""")
    self.localized("""foo = fooVal
other = obsolete
eff = leffVal
""")
    comparer = ContentComparer([Observer()])
    comparer.compare(File(self.ref, "en-reference.properties", ""),
                     File(self.l10n, "l10n.properties", ""),
                     mozpath.join(self.tmp, "merge", "l10n.properties"))
    expected = {
        'summary': {
            None: {
                'changed': 1,
                'changed_w': 1,
                'obsolete': 1,
                'unchanged': 1,
                'unchanged_w': 1
            }
        },
        'details': {
            'l10n.properties': [{'obsoleteEntity': u'other'}]
        }
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
def test_reference_xml_error(self):
    """Unparsable XML in the en-US value is a warning attributed to
    the localized entity; counts still add up as changed."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""<!ENTITY foo 'fooVal'>
<!ENTITY bar 'bad &val'>
<!ENTITY eff 'effVal'>""")
    self.localized("""<!ENTITY foo 'fooVal'>
<!ENTITY bar 'good val'>
<!ENTITY eff 'effVal'>
""")
    comparer = ContentComparer([Observer()])
    comparer.compare(File(self.ref, "en-reference.dtd", ""),
                     File(self.l10n, "l10n.dtd", ""),
                     mozpath.join(self.tmp, "merge", "l10n.dtd"))
    expected = {
        'summary': {
            None: {
                'warnings': 1,
                'unchanged': 2,
                'unchanged_w': 2,
                'changed': 1,
                'changed_w': 2
            }
        },
        'details': {
            'l10n.dtd': [{
                'warning': u"can't parse en-US value at line 1, "
                           u"column 0 for bar"
            }]
        }
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
def testMissing(self):
    """Missing entities are reported and written into the merge file
    after the translated entity."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
bar = barVal
eff = effVal""")
    self.localized("""bar = lBar
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.properties", ""),
               File(self.l10n, "l10n.properties", ""),
               mozpath.join(self.tmp, "merge", "l10n.properties"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {'summary': {None: {
            'changed': 1,
            'changed_w': 1,
            'missing': 2,
            'missing_w': 2
        }},
         'details': {
             'l10n.properties': [
                 {'missingEntity': u'foo'},
                 {'missingEntity': u'eff'}]
         }
         })
    mergefile = mozpath.join(self.tmp, "merge", "l10n.properties")
    self.assertTrue(os.path.isfile(mergefile))
    p = getParser(mergefile)
    p.readFile(mergefile)
    [m, n] = p.parse()
    # List comprehension instead of map(): under Python 3 map() returns
    # an iterator, which never compares equal to a list.
    self.assertEqual([e.key for e in m], ["bar", "foo", "eff"])
def testMismatchingAttributes(self):
    """Obsolete and missing attributes are errors; only the entity
    whose attributes match the reference survives the merge."""
    self.reference("""
foo = Foo
bar = Bar
    .tender = Attribute value
eff = Eff
""")
    self.localized("""\
foo = lFoo
  .obsolete = attr
bar = lBar
eff = lEff
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {
                'l10n.ftl': [
                    {
                        'error': u'Obsolete attribute: '
                                 'obsolete at line 2, column 3 for foo'
                    },
                    {
                        'error': u'Missing attribute: tender at line 3,'
                                 ' column 1 for bar',
                    },
                ],
            },
            'summary': {
                None: {
                    'changed': 3,
                    'changed_w': 5,
                    'errors': 2
                }
            }
        })
    # validate merge results
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    # assert_ is a deprecated unittest alias; assertTrue is explicit.
    self.assertTrue(os.path.exists(mergepath))
    p = getParser(mergepath)
    p.readFile(mergepath)
    merged_entities, merged_map = p.parse()
    self.assertEqual([e.key for e in merged_entities], ["eff"])
    merged_eff = merged_entities[merged_map['eff']]
    # eff should be l10n
    p.readFile(self.l10n)
    l10n_entities, l10n_map = p.parse()
    l10n_eff = l10n_entities[l10n_map['eff']]
    self.assertTrue(merged_eff.equals(l10n_eff))
def testMismatchingAttributes(self):
    """Attribute set mismatches against the reference are errors; the
    merge file keeps only the entity with matching attributes."""
    self.reference("""
foo = Foo
bar = Bar
    .tender = Attribute value
eff = Eff
""")
    self.localized("""\
foo = lFoo
  .obsolete = attr
bar = lBar
eff = lEff
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {
                'l10n.ftl': [
                    {
                        'error': u'Obsolete attribute: '
                                 'obsolete at line 2, column 3 for foo'
                    },
                    {
                        'error': u'Missing attribute: tender at line 3,'
                                 ' column 1 for bar',
                    },
                ],
            },
            'summary': {
                None: {'changed': 3, 'changed_w': 5, 'errors': 2}
            }
        }
    )
    # validate merge results
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    # assert_ is a deprecated unittest alias; assertTrue is explicit.
    self.assertTrue(os.path.exists(mergepath))
    p = getParser(mergepath)
    p.readFile(mergepath)
    merged_entities, merged_map = p.parse()
    self.assertEqual([e.key for e in merged_entities], ["eff"])
    merged_eff = merged_entities[merged_map['eff']]
    # eff should be l10n
    p.readFile(self.l10n)
    l10n_entities, l10n_map = p.parse()
    l10n_eff = l10n_entities[l10n_map['eff']]
    self.assertTrue(merged_eff.equals(l10n_eff))
def testMismatchingReferences(self):
    """Message references added or dropped relative to en-US produce
    warnings; no merge file is written."""
    self.reference("""\
foo = Reference { bar }
bar = Reference { baz }
baz = Reference
""")
    self.localized("""\
foo = Localized { qux }
bar = Localized
baz = Localized { qux }
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {
                'l10n.ftl': [
                    {
                        'warning': u'Missing message reference: bar '
                                   u'at line 1, column 1 for foo'
                    },
                    {
                        'warning': u'Obsolete message reference: qux '
                                   u'at line 1, column 19 for foo'
                    },
                    {
                        'warning': u'Missing message reference: baz '
                                   u'at line 2, column 1 for bar'
                    },
                    {
                        'warning': u'Obsolete message reference: qux '
                                   u'at line 3, column 19 for baz'
                    },
                ],
            },
            'summary': {
                None: {
                    'changed': 3,
                    'changed_w': 3,
                    'warnings': 4
                }
            }
        })
    # validate merge results
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    # assert_ is a deprecated unittest alias; assertFalse is explicit.
    self.assertFalse(os.path.exists(mergepath))
def testGood(self): self.assertTrue(os.path.isdir(self.tmp)) l10n = os.path.join(self.tmp, "l10n.dtd") open(l10n, "w").write("""<!ENTITY foo 'lFoo'> <!ENTITY bar 'lBar'> <!ENTITY eff 'lEff'> """) cc = ContentComparer() cc.set_merge_stage(os.path.join(self.tmp, "merge")) cc.compare(File(self.ref, "en-reference.dtd", ""), File(l10n, "l10n.dtd", "")) print cc.observer.serialize()
def testGood(self): self.assertTrue(os.path.isdir(self.tmp)) l10n = os.path.join(self.tmp, "l10n.properties") open(l10n, "w").write("""foo = lFoo bar = lBar eff = lEff """) cc = ContentComparer() cc.set_merge_stage(os.path.join(self.tmp, "merge")) cc.compare(File(self.ref, "en-reference.properties", ""), File(l10n, "l10n.properties", "")) print cc.observer.serialize()
def testMismatchingValues(self):
    """Missing or obsolete values versus the reference are errors;
    neither entity survives into the merge file."""
    self.reference("""
foo = Foo
    .foottr = something
bar
    .tender = Attribute value
""")
    self.localized("""\
foo
    .foottr = attr
bar = lBar
    .tender = localized
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {
                'l10n.ftl': [
                    {
                        'error': u'Missing value at line 1, column 1 for foo'
                    },
                    {
                        'error': u'Obsolete value at line 3, column 7 for bar',
                    },
                ]
            },
            'summary': {
                None: {
                    'changed': 2,
                    'changed_w': 4,
                    'errors': 2
                }
            }
        })
    # validate merge results
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    # assert_ is a deprecated unittest alias; assertTrue is explicit.
    self.assertTrue(os.path.exists(mergepath))
    p = getParser(mergepath)
    p.readFile(mergepath)
    merged_entities, _ = p.parse()
    self.assertEqual([e.key for e in merged_entities], [])
def compare_web_app(basedir, locales, other_observer=None):
    '''Compare gaia-style web app.

    Optional arguments are:
    - other_observer. An object implementing
        notify(category, _file, data)
      The return values of that callback are ignored.
    '''
    watcher = ContentComparer()
    if other_observer is not None:
        watcher.add_observer(other_observer)
    comparison = WebAppCompare(basedir)
    comparison.setWatcher(watcher)
    comparison.compare(locales)
    return watcher.observer
def testMissing(self): self.assertTrue(os.path.isdir(self.tmp)) l10n = os.path.join(self.tmp, "l10n.dtd") open(l10n, "w").write("""<!ENTITY bar 'lBar'> """) cc = ContentComparer() cc.set_merge_stage(os.path.join(self.tmp, "merge")) cc.compare(File(self.ref, "en-reference.dtd", ""), File(l10n, "l10n.dtd", "")) print cc.observer.serialize() mergefile = os.path.join(self.tmp, "merge", "l10n.dtd") self.assertTrue(os.path.isfile(mergefile)) p = getParser(mergefile) p.readFile(mergefile) [m, n] = p.parse() self.assertEqual(map(lambda e: e.key, m), ["bar", "eff", "foo"])
def testMismatchingValues(self):
    """Value/no-value mismatches against the reference are errors and
    keep both entities out of the merge file."""
    self.reference("""
foo = Foo
    .foottr = something
bar
    .tender = Attribute value
""")
    self.localized("""\
foo
    .foottr = attr
bar = lBar
    .tender = localized
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {
                'l10n.ftl': [
                    {
                        'error': u'Missing value at line 1, column 1 for foo'
                    },
                    {
                        'error': u'Obsolete value at line 3, column 7 for bar',
                    },
                ]
            },
            'summary': {
                None: {'changed': 2, 'changed_w': 4, 'errors': 2}
            }
        }
    )
    # validate merge results
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    # assert_ is a deprecated unittest alias; assertTrue is explicit.
    self.assertTrue(os.path.exists(mergepath))
    p = getParser(mergepath)
    p.readFile(mergepath)
    merged_entities, _ = p.parse()
    self.assertEqual([e.key for e in merged_entities], [])
def test_term_attributes(self):
    """Term attributes are locale-specific and not compared; only a
    fully missing term is reported, and nothing is merged."""
    self.reference("""
-foo = Foo
-bar = Bar
-baz = Baz
    .attr = Baz Attribute
-qux = Qux
    .attr = Qux Attribute
-missing = Missing
    .attr = An Attribute
""")
    self.localized("""\
-foo = Localized Foo
-bar = Localized Bar
    .attr = Locale-specific Bar Attribute
-baz = Localized Baz
-qux = Localized Qux
    .other = Locale-specific Qux Attribute
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {
                'l10n.ftl': [
                    {
                        'missingEntity': u'-missing'
                    },
                ],
            },
            'summary': {
                None: {
                    'changed': 4,
                    'changed_w': 4,
                    'missing': 1,
                    'missing_w': 1,
                }
            }
        })
    # validate merge results
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    # assert_ is a deprecated unittest alias; assertFalse is explicit.
    self.assertFalse(os.path.exists(mergepath))
def testJunk(self):
    """Unparsed DTD content is an error plus a missing entity; the
    merge file carries the reference value for the broken entity."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""<!ENTITY foo 'fooVal'>
<!ENTITY bar 'barVal'>
<!ENTITY eff 'effVal'>""")
    self.localized("""<!ENTITY foo 'fooVal'>
<!ENTY bar 'gimmick'>
<!ENTITY eff 'effVal'>
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.dtd", ""),
               File(self.l10n, "l10n.dtd", ""),
               mozpath.join(self.tmp, "merge", "l10n.dtd"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'summary': {
                None: {
                    'errors': 1,
                    'missing': 1,
                    'missing_w': 1,
                    'unchanged': 2,
                    'unchanged_w': 2
                }
            },
            'details': {
                'l10n.dtd': [{
                    'error': u'Unparsed content "<!ENTY bar '
                             u'\'gimmick\'>" '
                             u'from line 2 column 1 to '
                             u'line 2 column 22'
                }, {
                    'missingEntity': u'bar'
                }]
            }
        })
    mergefile = mozpath.join(self.tmp, "merge", "l10n.dtd")
    self.assertTrue(os.path.isfile(mergefile))
    p = getParser(mergefile)
    p.readFile(mergefile)
    [m, n] = p.parse()
    # List comprehension instead of map(): under Python 3 map() returns
    # an iterator, which never compares equal to a list.
    self.assertEqual([e.key for e in m], ["foo", "eff", "bar"])
def testError(self):
    """A printf-argument mismatch is an error; the merge file keeps
    the reference value for the broken entity."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
bar = %d barVal
eff = effVal""")
    self.localized("""\
bar = %S lBar
eff = leffVal
""")
    comparer = ContentComparer([Observer()])
    merge_file = mozpath.join(self.tmp, "merge", "l10n.properties")
    comparer.compare(File(self.ref, "en-reference.properties", ""),
                     File(self.l10n, "l10n.properties", ""),
                     merge_file)
    expected = {
        'summary': {
            None: {
                'changed': 2,
                'changed_w': 3,
                'errors': 1,
                'missing': 1,
                'missing_w': 1
            }
        },
        'details': {
            'l10n.properties': [
                {'missingEntity': u'foo'},
                {'error': u'argument 1 `S` should be `d` '
                          u'at line 1, column 7 for bar'},
            ]
        }
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
    self.assertTrue(os.path.isfile(merge_file))
    parser = getParser(merge_file)
    parser.readFile(merge_file)
    [entities, keys] = parser.parse()
    self.assertEqual([e.key for e in entities], ["eff", "foo", "bar"])
    # The broken entity falls back to the en-US value in the merge.
    self.assertEqual(entities[keys['bar']].val, '%d barVal')
def testObsolete(self):
    """An entity absent from the reference is obsolete (old API)."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
eff = effVal""")
    self.localized("""foo = fooVal
other = obsolete
eff = leffVal
""")
    comparer = ContentComparer()
    comparer.set_merge_stage(os.path.join(self.tmp, "merge"))
    comparer.compare(File(self.ref, "en-reference.properties", ""),
                     File(self.l10n, "l10n.properties", ""))
    expected = {
        'summary': {
            None: {
                'changed': 1,
                'obsolete': 1,
                'unchanged': 1
            }
        },
        'details': {
            'children': [
                ('l10n.properties',
                 {'value': {'obsoleteEntity': [u'other']}}),
            ]
        },
    }
    self.assertDictEqual(comparer.observer.toJSON(), expected)
def testJunk(self):
    """Unparsed DTD junk is an error and a missing entity; the merge
    file restores the reference value (old observer API)."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""<!ENTITY foo 'fooVal'>
<!ENTITY bar 'barVal'>
<!ENTITY eff 'effVal'>""")
    self.localized("""<!ENTITY foo 'fooVal'>
<!ENTY bar 'gimmick'>
<!ENTITY eff 'effVal'>
""")
    cc = ContentComparer()
    cc.set_merge_stage(os.path.join(self.tmp, "merge"))
    cc.compare(File(self.ref, "en-reference.dtd", ""),
               File(self.l10n, "l10n.dtd", ""))
    self.assertDictEqual(
        cc.observer.toJSON(),
        {'summary': {None: {
            'errors': 1,
            'missing': 1,
            'unchanged': 2
        }},
         'details': {
             'children': [
                 ('l10n.dtd',
                  {'value': {
                      # NOTE(review): "colum" matches the message the
                      # checker emits for this case — do not "fix" the
                      # expected string without changing the producer.
                      'error': [u'Unparsed content "<!ENTY bar '
                                u'\'gimmick\'>" '
                                u'from line 2 colum 1 to '
                                u'line 2 column 22'],
                      'missingEntity': [u'bar']}}
                  )
             ]}
         }
    )
    mergefile = os.path.join(self.tmp, "merge", "l10n.dtd")
    self.assertTrue(os.path.isfile(mergefile))
    p = getParser(mergefile)
    p.readFile(mergefile)
    [m, n] = p.parse()
    # List comprehension instead of map(): under Python 3 map() returns
    # an iterator, which never compares equal to a list.
    self.assertEqual([e.key for e in m], ["foo", "eff", "bar"])
def test_matching_tags(self):
    """Identical reference and localization: one unchanged entity and
    no merge file."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
#yes
""")
    self.localized("""foo = fooVal
#yes
""")
    comparer = ContentComparer([Observer()])
    merge_path = mozpath.join(self.tmp, "merge", "l10n.ftl")
    comparer.compare(File(self.ref, "en-reference.ftl", ""),
                     File(self.l10n, "l10n.ftl", ""),
                     merge_path)
    expected = {
        'summary': {
            None: {
                'unchanged': 1,
                'unchanged_w': 1
            }
        },
        'details': {}
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
    self.assertFalse(os.path.isfile(merge_path))
def test_matching_tags(self):
    """Byte-identical reference and localization report a single
    unchanged entity and produce no merge artifact."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
#yes
""")
    self.localized("""foo = fooVal
#yes
""")
    comparer = ContentComparer([Observer()])
    merge_path = mozpath.join(self.tmp, "merge", "l10n.ftl")
    comparer.compare(File(self.ref, "en-reference.ftl", ""),
                     File(self.l10n, "l10n.ftl", ""),
                     merge_path)
    expected = {
        'summary': {
            None: {
                'unchanged': 1,
                'unchanged_w': 1
            }
        },
        'details': {}
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
    self.assertFalse(os.path.isfile(merge_path))
def testError(self):
    """A printf mismatch errors out; merge keeps the en-US value
    (old observer API)."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
bar = %d barVal
eff = effVal""")
    self.localized("""\
bar = %S lBar
eff = leffVal
""")
    comparer = ContentComparer()
    comparer.set_merge_stage(os.path.join(self.tmp, "merge"))
    comparer.compare(File(self.ref, "en-reference.properties", ""),
                     File(self.l10n, "l10n.properties", ""))
    expected = {
        'summary': {
            None: {
                'changed': 2,
                'errors': 1,
                'missing': 1
            }
        },
        'details': {
            'children': [
                ('l10n.properties',
                 {'value': {
                     'error': [u'argument 1 `S` should be `d` '
                               u'at line 1, column 7 for bar'],
                     'missingEntity': [u'foo']}}),
            ]
        }
    }
    self.assertDictEqual(comparer.observer.toJSON(), expected)
    merge_file = os.path.join(self.tmp, "merge", "l10n.properties")
    self.assertTrue(os.path.isfile(merge_file))
    parser = getParser(merge_file)
    parser.readFile(merge_file)
    [entities, keys] = parser.parse()
    self.assertEqual([e.key for e in entities], ["eff", "foo", "bar"])
    # The broken entity falls back to the en-US value in the merge.
    self.assertEqual(entities[keys['bar']].val, '%d barVal')
def testMissing(self):
    """One missing FTL message is reported; no merge output exists."""
    self.reference("""\
foo = fooVal
bar = barVal
eff = effVal
""")
    self.localized("""\
foo = lFoo
eff = lEff
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {
                'l10n.ftl': [
                    {'missingEntity': u'bar'}
                ],
            },
            'summary': {
                None: {
                    'changed': 2,
                    'changed_w': 2,
                    'missing': 1,
                    'missing_w': 1
                }
            }
        }
    )
    # validate merge results
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    # assert_ is a deprecated unittest alias; assertFalse is explicit.
    self.assertFalse(os.path.exists(mergepath))
def testObsolete(self):
    """An entity missing from the reference is counted obsolete
    (old observer API)."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
eff = effVal""")
    self.localized("""foo = fooVal
other = obsolete
eff = leffVal
""")
    comparer = ContentComparer()
    comparer.set_merge_stage(os.path.join(self.tmp, "merge"))
    comparer.compare(File(self.ref, "en-reference.properties", ""),
                     File(self.l10n, "l10n.properties", ""))
    expected = {
        'summary': {
            None: {
                'changed': 1,
                'obsolete': 1,
                'unchanged': 1
            }
        },
        'details': {
            'children': [
                ('l10n.properties',
                 {'value': {'obsoleteEntity': [u'other']}})]},
    }
    self.assertDictEqual(comparer.observer.toJSON(), expected)
def testGood(self):
    """Fully translated properties file: three changed entities and no
    merge output (old observer API)."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
bar = barVal
eff = effVal""")
    self.localized("""foo = lFoo
bar = lBar
eff = lEff
""")
    cc = ContentComparer()
    cc.set_merge_stage(os.path.join(self.tmp, "merge"))
    cc.compare(File(self.ref, "en-reference.properties", ""),
               File(self.l10n, "l10n.properties", ""))
    self.assertDictEqual(
        cc.observer.toJSON(),
        {'summary': {None: {
            'changed': 3
        }},
         'details': {}
         }
    )
    # assert_ is a deprecated unittest alias; assertFalse is explicit.
    self.assertFalse(os.path.exists(os.path.join(cc.merge_stage,
                                                 'l10n.properties')))
def testGood(self):
    """Complete translation yields three changed entities, an empty
    details dict, and no merge file (old observer API)."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
bar = barVal
eff = effVal""")
    self.localized("""foo = lFoo
bar = lBar
eff = lEff
""")
    cc = ContentComparer()
    cc.set_merge_stage(os.path.join(self.tmp, "merge"))
    cc.compare(File(self.ref, "en-reference.properties", ""),
               File(self.l10n, "l10n.properties", ""))
    self.assertDictEqual(cc.observer.toJSON(), {
        'summary': {
            None: {
                'changed': 3
            }
        },
        'details': {}
    })
    # assert_ is a deprecated unittest alias; assertFalse is explicit.
    self.assertFalse(os.path.exists(
        os.path.join(cc.merge_stage, 'l10n.properties')))
def testJunk(self):
    """Unparsed DTD junk is an error plus a missing entity; the merge
    restores the reference value (oldest error-message format)."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""<!ENTITY foo 'fooVal'>
<!ENTITY bar 'barVal'>
<!ENTITY eff 'effVal'>""")
    self.localized("""<!ENTITY foo 'fooVal'>
<!ENTY bar 'gimmick'>
<!ENTITY eff 'effVal'>
""")
    cc = ContentComparer()
    cc.set_merge_stage(os.path.join(self.tmp, "merge"))
    cc.compare(File(self.ref, "en-reference.dtd", ""),
               File(self.l10n, "l10n.dtd", ""))
    self.assertDictEqual(
        cc.observer.toJSON(),
        {'summary': {None: {
            'errors': 1,
            'missing': 1,
            'unchanged': 2
        }},
         'details': {
             'children': [
                 ('l10n.dtd',
                  {'value': {
                      'error': [u'Unparsed content "<!ENTY bar '
                                u'\'gimmick\'>" at 23-44'],
                      'missingEntity': [u'bar']}}
                  )
             ]}
         }
    )
    mergefile = os.path.join(self.tmp, "merge", "l10n.dtd")
    self.assertTrue(os.path.isfile(mergefile))
    p = getParser(mergefile)
    p.readFile(mergefile)
    [m, n] = p.parse()
    # List comprehension instead of map(): under Python 3 map() returns
    # an iterator, which never compares equal to a list.
    self.assertEqual([e.key for e in m], ["foo", "eff", "bar"])
def testBroken(self):
    """Unparsed FTL content yields errors plus missing entries; only
    the cleanly parsed message survives into the merge file."""
    self.reference("""\
foo = fooVal
bar = barVal
eff = effVal
""")
    self.localized("""\
-- Invalid Comment
foo = lFoo
bar lBar
eff = lEff {
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {
                'l10n.ftl': [
                    {'error': u'Unparsed content "-- Invalid Comment" '
                              u'from line 1 column 1 '
                              u'to line 1 column 19'},
                    {'error': u'Unparsed content "bar lBar" '
                              u'from line 3 column 1 '
                              u'to line 3 column 9'},
                    {'error': u'Unparsed content "eff = lEff {" '
                              u'from line 4 column 1 '
                              u'to line 4 column 13'},
                    {'missingEntity': u'bar'},
                    {'missingEntity': u'eff'},
                ],
            },
            'summary': {
                None: {
                    'changed': 1,
                    'changed_w': 1,
                    'missing': 2,
                    'missing_w': 2,
                    'errors': 3
                }
            }
        }
    )
    # validate merge results
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    # assert_ is a deprecated unittest alias; assertTrue is explicit.
    self.assertTrue(os.path.exists(mergepath))
    p = getParser(mergepath)
    p.readFile(mergepath)
    merged_entities, merged_map = p.parse()
    self.assertEqual([e.key for e in merged_entities], ["foo"])
    merged_foo = merged_entities[merged_map['foo']]
    # foo should be l10n
    p.readFile(self.l10n)
    l10n_entities, l10n_map = p.parse()
    l10n_foo = l10n_entities[l10n_map['foo']]
    self.assertTrue(merged_foo.equals(l10n_foo))
def testBroken(self):
    """Three junk spans become three errors and two missing entries;
    the merge file contains only the localized foo."""
    self.reference("""\
foo = fooVal
bar = barVal
eff = effVal
""")
    self.localized("""\
-- Invalid Comment
foo = lFoo
bar lBar
eff = lEff {
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {
                'l10n.ftl': [
                    {
                        'error': u'Unparsed content "-- Invalid Comment" '
                                 u'from line 1 column 1 '
                                 u'to line 1 column 19'
                    },
                    {
                        'error': u'Unparsed content "bar lBar" '
                                 u'from line 3 column 1 '
                                 u'to line 3 column 9'
                    },
                    {
                        'error': u'Unparsed content "eff = lEff {" '
                                 u'from line 4 column 1 '
                                 u'to line 4 column 13'
                    },
                    {
                        'missingEntity': u'bar'
                    },
                    {
                        'missingEntity': u'eff'
                    },
                ],
            },
            'summary': {
                None: {
                    'changed': 1,
                    'changed_w': 1,
                    'missing': 2,
                    'missing_w': 2,
                    'errors': 3
                }
            }
        })
    # validate merge results
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    # assert_ is a deprecated unittest alias; assertTrue is explicit.
    self.assertTrue(os.path.exists(mergepath))
    p = getParser(mergepath)
    p.readFile(mergepath)
    merged_entities, merged_map = p.parse()
    self.assertEqual([e.key for e in merged_entities], ["foo"])
    merged_foo = merged_entities[merged_map['foo']]
    # foo should be l10n
    p.readFile(self.l10n)
    l10n_entities, l10n_map = p.parse()
    l10n_foo = l10n_entities[l10n_map['foo']]
    self.assertTrue(merged_foo.equals(l10n_foo))