def testObsoleteStandaloneComment(self):
    """A standalone comment in the l10n file is not reported as obsolete.

    Both messages differ from the reference, so they count as changed;
    no merge file must be written.
    """
    self.reference("""\
foo = fooVal
bar = barVal
""")
    self.localized("""\
foo = lFoo
// Standalone Comment
bar = lBar
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {},
            'summary': {
                None: {
                    'changed': 2,
                    'changed_w': 2,
                }
            }
        }
    )
    # validate merge results: nothing broken, so no merge file is written.
    # assert_ is a deprecated alias (removed in Python 3.12); use assertFalse.
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    self.assertFalse(os.path.exists(mergepath))
def test_duplicate(self):
    """Duplicated messages: a reference dupe is a warning, an l10n dupe
    is an error, and the broken l10n file is not merged."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
bar = barVal
eff = effVal
foo = other val for foo""")
    self.localized("""foo = localized
bar = lBar
eff = localized eff
bar = duplicated bar
""")
    merge_path = mozpath.join(self.tmp, "merge", "l10n.ftl")
    comparer = ContentComparer([Observer()])
    comparer.compare(File(self.ref, "en-reference.ftl", ""),
                     File(self.l10n, "l10n.ftl", ""),
                     merge_path)
    expected = {
        'summary': {None: {
            'errors': 1,
            'warnings': 1,
            'changed': 3,
            'changed_w': 6
        }},
        'details': {
            'l10n.ftl': [
                {'warning': u'foo occurs 2 times'},
                {'error': u'bar occurs 2 times'},
            ]
        }
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
    # duplicate entities leave the l10n file unmergeable
    self.assertFalse(os.path.isfile(merge_path))
def testMissing(self):
    """Entities missing from the l10n file are reported and then pulled
    into the merge file after the localized ones."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
bar = barVal
eff = effVal""")
    self.localized("""bar = lBar
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.properties", ""),
               File(self.l10n, "l10n.properties", ""),
               mozpath.join(self.tmp, "merge", "l10n.properties"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {'summary': {None: {
            'changed': 1, 'changed_w': 1,
            'missing': 2, 'missing_w': 2
        }},
         'details': {
             'l10n.properties': [
                 {'missingEntity': u'foo'},
                 {'missingEntity': u'eff'}]
         }
         })
    mergefile = mozpath.join(self.tmp, "merge", "l10n.properties")
    self.assertTrue(os.path.isfile(mergefile))
    p = getParser(mergefile)
    p.readFile(mergefile)
    [m, n] = p.parse()
    # map() returns a lazy iterator on Python 3, which never compares
    # equal to a list; compare a list comprehension instead.
    self.assertEqual([e.key for e in m], ["bar", "foo", "eff"])
def testGood(self):
    """All entities localized (properties): counted as changed with word
    counts, and no merge file is produced."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal word
bar = barVal word
eff = effVal""")
    self.localized("""foo = lFoo
bar = lBar
eff = lEff word
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.properties", ""),
               File(self.l10n, "l10n.properties", ""),
               mozpath.join(self.tmp, "merge", "l10n.properties"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {'summary': {None: {
            'changed': 3,
            'changed_w': 5
        }},
         'details': {}
         }
    )
    # assert_ is a deprecated alias (removed in Python 3.12); use assertFalse
    self.assertFalse(
        os.path.exists(mozpath.join(self.tmp, "merge", 'l10n.properties')))
def testGood(self):
    """All entities localized (DTD): counted as changed, and no merge
    file is produced."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""<!ENTITY foo 'fooVal'>
<!ENTITY bar 'barVal'>
<!ENTITY eff 'effVal'>""")
    self.localized("""<!ENTITY foo 'lFoo'>
<!ENTITY bar 'lBar'>
<!ENTITY eff 'lEff'>
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.dtd", ""),
               File(self.l10n, "l10n.dtd", ""),
               mozpath.join(self.tmp, "merge", "l10n.dtd"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {'summary': {None: {
            'changed': 3,
            'changed_w': 3
        }},
         'details': {}
         }
    )
    # assert_ is a deprecated alias (removed in Python 3.12); use assertFalse
    self.assertFalse(
        os.path.exists(mozpath.join(self.tmp, "merge", 'l10n.dtd')))
def loadConfigs(self):
    """Entry point to load the l10n.ini file this Parser refers to.

    This implementation uses synchronous loads, subclasses might
    overload this behaviour. If you do, make sure to pass a file-like
    object to onLoadConfig.
    """
    parser = ConfigParser(self.defaults)
    parser.read(self.inipath)
    depth = self.getDepth(parser)
    self.base = mozpath.join(mozpath.dirname(self.inipath), depth)
    # spawn child loaders for any other l10n.ini files to be included
    try:
        for title, path in parser.items('includes'):
            # entries inherited from the defaults are not real includes
            if title in self.defaults:
                continue
            self.addChild(title, path, parser)
    except NoSectionError:
        pass
    # optional "dirs" list in the "compare" section
    try:
        self.dirs.extend(parser.get('compare', 'dirs').split())
    except (NoOptionError, NoSectionError):
        pass
    # optional "all" option in the "general" section
    try:
        self.all_path = mozpath.join(self.base,
                                     parser.get('general', 'all'))
    except (NoOptionError, NoSectionError):
        self.all_path = None
    return parser
def test_duplicate_attributes(self):
    """An attribute repeated on one message yields a single warning and
    no merge file."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
    .attr = good""")
    self.localized("""foo = localized
    .attr = not
    .attr = so
    .attr = good
""")
    merge_path = mozpath.join(self.tmp, "merge", "l10n.ftl")
    comparer = ContentComparer([Observer()])
    comparer.compare(File(self.ref, "en-reference.ftl", ""),
                     File(self.l10n, "l10n.ftl", ""),
                     merge_path)
    expected = {
        'summary': {None: {
            'warnings': 1,
            'changed': 1,
            'changed_w': 2
        }},
        'details': {
            'l10n.ftl': [
                {'warning': u'Attribute "attr" occurs 3 times '
                            u'at line 4, column 5 for foo'
                 }]
        }
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
    self.assertFalse(os.path.isfile(merge_path))
def testMismatchingAttributes(self):
    """Obsolete and missing attributes are errors; only the fully-good
    message (eff) survives into the merge file as the l10n version."""
    self.reference("""
foo = Foo
bar = Bar
  .tender = Attribute value
eff = Eff
""")
    self.localized("""\
foo = lFoo
  .obsolete = attr
bar = lBar
eff = lEff
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {
                'l10n.ftl': [
                    {
                        'error': u'Obsolete attribute: '
                                 'obsolete at line 2, column 3 for foo'
                    },
                    {
                        'error': u'Missing attribute: tender at line 3,'
                                 ' column 1 for bar',
                    },
                ],
            },
            'summary': {
                None: {'changed': 3, 'changed_w': 5, 'errors': 2}
            }
        }
    )
    # validate merge results
    # assert_ is a deprecated alias (removed in Python 3.12); use assertTrue
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    self.assertTrue(os.path.exists(mergepath))
    p = getParser(mergepath)
    p.readFile(mergepath)
    merged_entities, merged_map = p.parse()
    self.assertEqual([e.key for e in merged_entities], ["eff"])
    merged_eff = merged_entities[merged_map['eff']]
    # eff should be l10n
    p.readFile(self.l10n)
    l10n_entities, l10n_map = p.parse()
    l10n_eff = l10n_entities[l10n_map['eff']]
    self.assertTrue(merged_eff.equals(l10n_eff))
def test_reference_xml_error(self):
    """A reference entity with broken XML is reported as a warning while
    the localized value still counts as changed."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""<!ENTITY foo 'fooVal'>
<!ENTITY bar 'bad &val'>
<!ENTITY eff 'effVal'>""")
    self.localized("""<!ENTITY foo 'fooVal'>
<!ENTITY bar 'good val'>
<!ENTITY eff 'effVal'>
""")
    comparer = ContentComparer([Observer()])
    comparer.compare(File(self.ref, "en-reference.dtd", ""),
                     File(self.l10n, "l10n.dtd", ""),
                     mozpath.join(self.tmp, "merge", "l10n.dtd"))
    expected = {
        'summary': {None: {
            'warnings': 1,
            'unchanged': 2,
            'unchanged_w': 2,
            'changed': 1,
            'changed_w': 2
        }},
        'details': {
            'l10n.dtd': [
                {'warning': u"can't parse en-US value at line 1, "
                            u"column 0 for bar"}]
        }
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
def testObsolete(self):
    """An entity present only in the l10n file is reported as obsolete."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
eff = effVal""")
    self.localized("""foo = fooVal
other = obsolete
eff = leffVal
""")
    comparer = ContentComparer([Observer()])
    comparer.compare(File(self.ref, "en-reference.properties", ""),
                     File(self.l10n, "l10n.properties", ""),
                     mozpath.join(self.tmp, "merge", "l10n.properties"))
    expected = {
        'summary': {None: {
            'changed': 1,
            'changed_w': 1,
            'obsolete': 1,
            'unchanged': 1,
            'unchanged_w': 1
        }},
        'details': {
            'l10n.properties': [
                {'obsoleteEntity': u'other'}]
        }
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
def set_root(self, basepath):
    """Store the absolute root resolved against this config's directory.

    When there is no config path, root is reset to None.
    """
    if self.path is None:
        self.root = None
        return
    joined = mozpath.join(mozpath.dirname(self.path), basepath)
    self.root = mozpath.abspath(joined)
def testMismatchingValues(self):
    """Missing and obsolete values are errors; neither message makes it
    into the merge file."""
    self.reference("""
foo = Foo
  .foottr = something
bar
  .tender = Attribute value
""")
    self.localized("""\
foo
  .foottr = attr
bar = lBar
  .tender = localized
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {
                'l10n.ftl': [
                    {
                        'error': u'Missing value at line 1, column 1 for foo'
                    },
                    {
                        'error': u'Obsolete value at line 3, column 7 for bar',
                    },
                ]
            },
            'summary': {
                None: {'changed': 2, 'changed_w': 4, 'errors': 2}
            }
        }
    )
    # validate merge results
    # assert_ is a deprecated alias (removed in Python 3.12); use assertTrue
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    self.assertTrue(os.path.exists(mergepath))
    p = getParser(mergepath)
    p.readFile(mergepath)
    merged_entities, _ = p.parse()
    self.assertEqual([e.key for e in merged_entities], [])
def test_universal_newlines(self):
    """readFile normalizes \\r and \\r\\n line endings to \\n."""
    target = mozpath.join(self.dir, 'file')
    with open(target, 'wb') as fh:
        fh.write(b'one\ntwo\rthree\r\n')
    self.parser.readFile(target)
    self.assertEqual('one\ntwo\nthree\n', self.parser.ctx.contents)
def addChild(self, title, path, orig_cp):
    """Create and load a child SourceTreeConfigParser for an include.

    An "include_<title>" section may point at a different repository
    (e.g. "mail" indicating where to find "toolkit"'s l10n.ini), in
    which case the branch and ini location come from that section,
    honoring any configured branch redirects.
    """
    details_section = 'include_' + title
    if orig_cp.has_section(details_section):
        branch = orig_cp.get(details_section, 'mozilla')
        branch = self.redirects.get(branch, branch)
        inipath = orig_cp.get(details_section, 'l10n.ini')
        child_path = mozpath.join(self.base, branch, inipath)
    else:
        child_path = mozpath.join(self.base, path)
    child = SourceTreeConfigParser(child_path, self.base,
                                   self.redirects, **self.defaults)
    child.loadConfigs()
    self.children.append(child)
def addChild(self, title, path, orig_cp):
    """Create a child L10nConfigParser and load it.

    title -- indicates the module's name
    path -- indicates the path to the module's l10n.ini file
    orig_cp -- the configuration parser of this l10n.ini
    """
    inipath = mozpath.join(self.base, path)
    child = L10nConfigParser(inipath, **self.defaults)
    child.loadConfigs()
    self.children.append(child)
def test_app(self):
    'Test parsing a App'
    app = EnumerateApp(
        mozpath.join(self.stage, 'comm', 'mail', 'locales', 'l10n.ini'),
        mozpath.join(self.stage, 'l10n-central'))
    self.assertListEqual(app.config.allLocales(), ['af', 'de', 'fr'])
    self.assertEqual(len(app.config.children), 1)
    projectconfig = app.asConfig()
    self.assertListEqual(projectconfig.locales, ['af', 'de', 'fr'])
    files = list(ProjectFiles('de', [projectconfig]))
    self.assertEqual(len(files), 3)
    # expected (l10n path tail, reference path tail) for each file,
    # in enumeration order
    expectations = [
        (['de', 'mail', 'mail.ftl'],
         ['mail', 'locales', 'en-US', 'mail.ftl']),
        (['de', 'toolkit', 'localized.ftl'],
         ['comm', 'mozilla', 'toolkit', 'locales', 'en-US',
          'localized.ftl']),
        (['de', 'toolkit', 'platform.ftl'],
         ['comm', 'mozilla', 'toolkit', 'locales', 'en-US',
          'platform.ftl']),
    ]
    for entry, (l10n_tail, ref_tail) in zip(files, expectations):
        l10nfile, reffile, mergefile, test = entry
        self.assertListEqual(
            mozpath.split(l10nfile)[-len(l10n_tail):], l10n_tail)
        self.assertListEqual(
            mozpath.split(reffile)[-len(ref_tail):], ref_tail)
        self.assertIsNone(mergefile)
        self.assertSetEqual(test, set())
def testJunk(self):
    """Unparsable DTD content is an error; the broken entity is reported
    missing and pulled from the reference into the merge file."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""<!ENTITY foo 'fooVal'>
<!ENTITY bar 'barVal'>
<!ENTITY eff 'effVal'>""")
    self.localized("""<!ENTITY foo 'fooVal'>
<!ENTY bar 'gimmick'>
<!ENTITY eff 'effVal'>
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.dtd", ""),
               File(self.l10n, "l10n.dtd", ""),
               mozpath.join(self.tmp, "merge", "l10n.dtd"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {'summary': {None: {
            'errors': 1,
            'missing': 1,
            'missing_w': 1,
            'unchanged': 2,
            'unchanged_w': 2
        }},
         'details': {
             'l10n.dtd': [
                 {'error': u'Unparsed content "<!ENTY bar '
                           u'\'gimmick\'>\n" '
                           u'from line 2 column 1 to '
                           u'line 3 column 1'},
                 {'missingEntity': u'bar'}]
         }
         })
    mergefile = mozpath.join(self.tmp, "merge", "l10n.dtd")
    self.assertTrue(os.path.isfile(mergefile))
    p = getParser(mergefile)
    p.readFile(mergefile)
    [m, n] = p.parse()
    # map() returns a lazy iterator on Python 3, which never compares
    # equal to a list; compare a list comprehension instead.
    self.assertEqual([e.key for e in m], ["foo", "eff", "bar"])
def test_matching_tags(self):
    """Identical reference and l10n content (including the tag) counts
    as unchanged and produces no merge file."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
    #yes
""")
    self.localized("""foo = fooVal
    #yes
""")
    merge_path = mozpath.join(self.tmp, "merge", "l10n.ftl")
    comparer = ContentComparer([Observer()])
    comparer.compare(File(self.ref, "en-reference.ftl", ""),
                     File(self.l10n, "l10n.ftl", ""),
                     merge_path)
    expected = {
        'summary': {None: {
            'unchanged': 1,
            'unchanged_w': 1
        }},
        'details': {}
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
    self.assertFalse(os.path.isfile(merge_path))
def testMissing(self):
    """A missing FTL message is reported, but FTL merge does not write a
    merge file for missing-only content."""
    self.reference("""\
foo = fooVal
bar = barVal
eff = effVal
""")
    self.localized("""\
foo = lFoo
eff = lEff
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {
                'l10n.ftl': [
                    {'missingEntity': u'bar'}
                ],
            },
            'summary': {
                None: {
                    'changed': 2,
                    'changed_w': 2,
                    'missing': 1,
                    'missing_w': 1
                }
            }
        }
    )
    # validate merge results
    # assert_ is a deprecated alias (removed in Python 3.12); use assertFalse
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    self.assertFalse(os.path.exists(mergepath))
def testError(self):
    """A printf-argument mismatch is an error; the merge file keeps the
    good l10n message and takes foo and bar from the reference."""
    self.assertTrue(os.path.isdir(self.tmp))
    self.reference("""foo = fooVal
bar = %d barVal
eff = effVal""")
    self.localized("""\
bar = %S lBar
eff = leffVal
""")
    comparer = ContentComparer([Observer()])
    comparer.compare(File(self.ref, "en-reference.properties", ""),
                     File(self.l10n, "l10n.properties", ""),
                     mozpath.join(self.tmp, "merge", "l10n.properties"))
    expected = {
        'summary': {None: {
            'changed': 2,
            'changed_w': 3,
            'errors': 1,
            'missing': 1,
            'missing_w': 1
        }},
        'details': {
            'l10n.properties': [
                {'missingEntity': u'foo'},
                {'error': u'argument 1 `S` should be `d` '
                          u'at line 1, column 7 for bar'}]
        }
    }
    self.assertDictEqual(comparer.observers[0].toJSON(), expected)
    mergefile = mozpath.join(self.tmp, "merge", "l10n.properties")
    self.assertTrue(os.path.isfile(mergefile))
    parser = getParser(mergefile)
    parser.readFile(mergefile)
    [entities, keymap] = parser.parse()
    self.assertEqual([e.key for e in entities], ["eff", "foo", "bar"])
    # bar is taken from the reference, not the erroneous l10n value
    self.assertEqual(entities[keymap['bar']].val, '%d barVal')
def getFilters(self):
    '''Get the test functions from this ConfigParser and all children.

    Only works with synchronous loads, used by compare-locales, which
    is local anyway.
    '''
    filter_path = mozpath.join(mozpath.dirname(self.inipath), 'filter.py')
    filters = []
    try:
        scope = {}
        with open(filter_path) as f:
            exec(compile(f.read(), filter_path, 'exec'), {}, scope)
        candidate = scope.get('test')
        if callable(candidate):
            filters = [candidate]
    except BaseException:  # we really want to handle EVERYTHING here
        filters = []
    for child in self.children:
        filters += child.getFilters()
    return filters
def gather_repo(self, repo):
    """Collect commit-graph data for one git repository.

    Reads the repo's l10n.toml to determine which paths matter, then
    walks `git log` for each branch (excluding commits already reachable
    from prior branches or from the last known converted revision),
    recording per-commit parents/children, dates, and which repo/branch
    each hash belongs to. For branches not seen before (forks), the
    merge-base against each prior branch is located and the first commit
    past it is remembered in self.forks.
    """
    basepath = repo.path
    pc = TOMLParser().parse(mozpath.join(basepath, "l10n.toml"))
    # the config itself plus every reference pattern, relative to the repo
    paths = ["l10n.toml"] + [
        mozpath.relpath(
            m["reference"].pattern.expand(m["reference"].env),
            basepath
        )
        for m in pc.paths
    ]
    self.paths_for_repos[repo.name] = paths
    branches = repo.branches()
    self.branches[repo.name] = branches[:]
    known_revs = self.revs.get(repo.name, {})
    for branch_num in range(len(branches)):
        branch = branches[branch_num]
        prior_branches = branches[:branch_num]
        # log only commits not reachable from earlier branches
        cmd = [
            "git", "-C", basepath,
            "log", "--parents", "--format=%H %ct %P"
        ] + [
            "^" + repo.ref(b) for b in prior_branches
        ]
        if branch in known_revs:
            # resume after the last converted revision of this branch
            cmd += ["^" + known_revs[branch]]
            block_revs = []
        elif branch_num == 0:
            # We haven't seen this repo yet.
            # Block all known revs in the target from being converted again
            # in case of repository-level forks.
            block_revs = self.target.known_revs()
        # NOTE(review): when branch_num > 0 and the branch is unknown,
        # block_revs carries over from the previous iteration — presumably
        # intentional (first iteration always defines it), but worth
        # confirming.
        cmd += [repo.ref(branch), "--"] + paths
        out = subprocess.run(
            cmd, stdout=subprocess.PIPE, encoding="ascii"
        ).stdout
        for commit_line in out.splitlines():
            # each line: "<hash> <commit-time> <parent hashes...>"
            segs = commit_line.split()
            commit = segs.pop(0)
            if commit in block_revs:
                continue
            commit_date = int(segs.pop(0))
            self.repos_for_hash[commit].append((repo.name, branch))
            self.hashes_for_repo[repo.name].add(commit)
            # keep the latest date seen for a hash shared across repos
            self.commit_dates[commit] = max(
                commit_date, self.commit_dates.get(commit, 0)
            )
            for parent in segs:
                self.parents[commit].add(parent)
                self.children[parent].add(commit)
        if branch in known_revs or branch_num == 0:
            continue
        # We don't know this branch yet, and it's a fork.
        # Find the branch point to the previous branches.
        for prior_branch in prior_branches:
            cmd = [
                "git", "-C", basepath,
                "merge-base",
                repo.ref(branch),
                repo.ref(prior_branch),
            ]
            branch_rev = subprocess.run(
                cmd, stdout=subprocess.PIPE, encoding="ascii"
            ).stdout.strip()
            if not branch_rev:
                continue
            # We have a branch revision, find the next child on the
            # route to the prior branch to add that to.
            cmd = [
                "git", "-C", basepath,
                "rev-list", "-n", "1",
                "{}..{}".format(branch_rev, repo.ref(prior_branch)),
            ]
            fork_rev = subprocess.run(
                cmd, stdout=subprocess.PIPE, encoding="ascii"
            ).stdout.strip()
            if fork_rev:
                self.forks[fork_rev].append(
                    (repo.name, branch, branch_rev)
                )
def setUp(self):
    """Build a fake source tree plus an l10n checkout in a temp dir."""
    self.stage = tempfile.mkdtemp()
    mail = mozpath.join(self.stage, 'comm', 'mail', 'locales')
    toolkit = mozpath.join(
        self.stage, 'comm', 'mozilla', 'toolkit', 'locales')
    l10n = mozpath.join(self.stage, 'l10n-central', 'de', 'toolkit')
    for directory in (mozpath.join(mail, 'en-US'),
                      mozpath.join(toolkit, 'en-US'),
                      l10n):
        os.makedirs(directory)
    fixtures = [
        (mozpath.join(mail, 'l10n.ini'), MAIL_INI),
        (mozpath.join(mail, 'all-locales'), MAIL_ALL_LOCALES),
        (mozpath.join(mail, 'filter.py'), MAIL_FILTER_PY),
        (mozpath.join(toolkit, 'l10n.ini'), TOOLKIT_INI),
        (mozpath.join(mail, 'en-US', 'mail.ftl'), ''),
        (mozpath.join(toolkit, 'en-US', 'platform.ftl'), ''),
        (mozpath.join(l10n, 'localized.ftl'), ''),
    ]
    for path, content in fixtures:
        with open(path, 'w') as f:
            f.write(content)
def test_join(self):
    """join drops empty segments and restarts at absolute ones."""
    cases = [
        (('foo', 'bar', 'baz'), 'foo/bar/baz'),
        (('foo', '', 'bar'), 'foo/bar'),
        (('', 'foo', 'bar'), 'foo/bar'),
        (('', 'foo', '/bar'), '/bar'),
    ]
    for segments, expected in cases:
        self.assertEqual(join(*segments), expected)
def localpath(self):
    """Path of this file, prefixed with locale and module when a module
    is set; otherwise just the bare file path."""
    if not self.module:
        return self.file
    return mozpath.join(self.locale, self.module, self.file)
def localpath(self):
    """Path of this file, prefixed with the module when one is set."""
    if self.module:
        return mozpath.join(self.module, self.file)
    return self.file
def testBroken(self):
    """Broken FTL syntax: junk is reported as errors, the affected
    messages count as missing, and only the good l10n message (foo)
    survives into the merge file."""
    self.reference("""\
foo = fooVal
bar = barVal
eff = effVal
""")
    self.localized("""\
-- Invalid Comment
foo = lFoo
bar lBar
eff = lEff {
""")
    cc = ContentComparer([Observer()])
    cc.compare(File(self.ref, "en-reference.ftl", ""),
               File(self.l10n, "l10n.ftl", ""),
               mozpath.join(self.tmp, "merge", "l10n.ftl"))
    self.assertDictEqual(
        cc.observers[0].toJSON(),
        {
            'details': {
                'l10n.ftl': [
                    {'error': u'Unparsed content "-- Invalid Comment" '
                              u'from line 1 column 1 '
                              u'to line 1 column 19'},
                    {'error': u'Unparsed content "bar lBar" '
                              u'from line 3 column 1 '
                              u'to line 3 column 9'},
                    {'error': u'Unparsed content "eff = lEff {" '
                              u'from line 4 column 1 '
                              u'to line 4 column 13'},
                    {'missingEntity': u'bar'},
                    {'missingEntity': u'eff'},
                ],
            },
            'summary': {
                None: {
                    'changed': 1,
                    'changed_w': 1,
                    'missing': 2,
                    'missing_w': 2,
                    'errors': 3
                }
            }
        }
    )
    # validate merge results
    # assert_ is a deprecated alias (removed in Python 3.12); use assertTrue
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    self.assertTrue(os.path.exists(mergepath))
    p = getParser(mergepath)
    p.readFile(mergepath)
    merged_entities, merged_map = p.parse()
    self.assertEqual([e.key for e in merged_entities], ["foo"])
    merged_foo = merged_entities[merged_map['foo']]
    # foo should be l10n
    p.readFile(self.l10n)
    l10n_entities, l10n_map = p.parse()
    l10n_foo = l10n_entities[l10n_map['foo']]
    self.assertTrue(merged_foo.equals(l10n_foo))
def localized(self, content):
    """Write content to the localized fixture file, remembering its path
    in self.l10n."""
    self.l10n = mozpath.join(self.tmp, "l10n" + self.extension)
    # use a context manager so the handle is closed deterministically
    # instead of leaking until garbage collection
    with open(self.l10n, "w") as f:
        f.write(content)
def _files(self, matcher):
    """Yield mocked paths under the matcher's prefix that it matches."""
    base = matcher.prefix
    candidates = (mozpath.join(base, rel)
                  for rel in self.mocks.get(base, []))
    for candidate in candidates:
        if matcher.match(candidate):
            yield candidate
def test_join(self):
    """join collapses empty segments; an absolute segment resets the
    result."""
    self.assertEqual('foo/bar/baz', join('foo', 'bar', 'baz'))
    self.assertEqual('foo/bar', join('foo', '', 'bar'))
    self.assertEqual('foo/bar', join('', 'foo', 'bar'))
    self.assertEqual('/bar', join('', 'foo', '/bar'))
def reference(self, content):
    """Write content to the reference fixture file, remembering its path
    in self.ref."""
    self.ref = mozpath.join(self.tmp, "en-reference" + self.extension)
    # use a context manager so the handle is closed deterministically
    # instead of leaking until garbage collection
    with open(self.ref, "w") as f:
        f.write(content)
def reference(self, content):
    """Write content to the reference fixture file, remembering its path
    in self.ref."""
    self.ref = mozpath.join(self.tmp, "en-reference" + self.extension)
    # use a context manager so the handle is closed deterministically
    # instead of leaking until garbage collection
    with open(self.ref, "w") as f:
        f.write(content)
def localized(self, content):
    """Write content to the localized fixture file, remembering its path
    in self.l10n."""
    self.l10n = mozpath.join(self.tmp, "l10n" + self.extension)
    # use a context manager so the handle is closed deterministically
    # instead of leaking until garbage collection
    with open(self.l10n, "w") as f:
        f.write(content)
def setUp(self):
    """Create a scratch directory containing a merge/ subdirectory."""
    self.maxDiff = None
    self.tmp = mkdtemp()
    merge_dir = mozpath.join(self.tmp, "merge")
    os.mkdir(merge_dir)
def test_universal_newlines(self):
    """Mixed \\n, \\r and \\r\\n endings are all normalized to \\n."""
    path = mozpath.join(self.dir, 'file')
    raw = b'one\ntwo\rthree\r\n'
    with open(path, 'wb') as fh:
        fh.write(raw)
    self.parser.readFile(path)
    self.assertEqual(self.parser.ctx.contents, 'one\ntwo\nthree\n')
def set_root(self, basepath):
    """Resolve basepath against this config's directory into an absolute
    root; None when there is no config path."""
    if self.path is None:
        self.root = None
        return
    base_dir = mozpath.dirname(self.path)
    self.root = mozpath.abspath(mozpath.join(base_dir, basepath))
def setUp(self):
    """Stage a miniature source tree and l10n checkout for the tests."""
    self.stage = tempfile.mkdtemp()
    mail = mozpath.join(self.stage, 'comm', 'mail', 'locales')
    toolkit = mozpath.join(
        self.stage, 'comm', 'mozilla', 'toolkit', 'locales')
    l10n = mozpath.join(self.stage, 'l10n-central', 'de', 'toolkit')
    os.makedirs(mozpath.join(mail, 'en-US'))
    os.makedirs(mozpath.join(toolkit, 'en-US'))
    os.makedirs(l10n)

    def write(path, content):
        # helper: create one fixture file with the given content
        with open(path, 'w') as f:
            f.write(content)

    write(mozpath.join(mail, 'l10n.ini'), MAIL_INI)
    write(mozpath.join(mail, 'all-locales'), MAIL_ALL_LOCALES)
    write(mozpath.join(mail, 'filter.py'), MAIL_FILTER_PY)
    write(mozpath.join(toolkit, 'l10n.ini'), TOOLKIT_INI)
    write(mozpath.join(mail, 'en-US', 'mail.ftl'), '')
    write(mozpath.join(toolkit, 'en-US', 'platform.ftl'), '')
    write(mozpath.join(l10n, 'localized.ftl'), '')
def testBroken(self):
    """Broken FTL syntax: junk lines become errors, the affected
    messages are missing, and the merge file keeps only the good
    l10n message (foo)."""
    self.reference("""\
foo = fooVal
bar = barVal
eff = effVal
""")
    self.localized("""\
-- Invalid Comment
foo = lFoo
bar lBar
eff = lEff {
""")
    comparer = ContentComparer()
    comparer.observers.append(Observer())
    comparer.compare(File(self.ref, "en-reference.ftl", ""),
                     File(self.l10n, "l10n.ftl", ""),
                     mozpath.join(self.tmp, "merge", "l10n.ftl"))
    expected = {
        'details': {
            'l10n.ftl': [
                {
                    'error': u'Unparsed content "-- Invalid Comment" '
                             u'from line 1 column 1 '
                             u'to line 1 column 19'
                },
                {
                    'error': u'Unparsed content "bar lBar" '
                             u'from line 3 column 1 '
                             u'to line 3 column 9'
                },
                {
                    'error': u'Unparsed content "eff = lEff {" '
                             u'from line 4 column 1 '
                             u'to line 4 column 13'
                },
                {
                    'missingEntity': u'bar'
                },
                {
                    'missingEntity': u'eff'
                },
            ],
        },
        'summary': {
            None: {
                'changed': 1,
                'changed_w': 1,
                'missing': 2,
                'missing_w': 2,
                'errors': 3
            }
        }
    }
    self.assertDictEqual(comparer.observers.toJSON(), expected)
    # validate merge results
    mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
    self.assertTrue(os.path.exists(mergepath))
    parser = getParser(mergepath)
    parser.readFile(mergepath)
    merged = parser.parse()
    self.assertEqual(list(merged.keys()), ["foo"])
    # foo should be the l10n version
    parser.readFile(self.l10n)
    l10n_entities = parser.parse()
    self.assertTrue(merged['foo'].equals(l10n_entities['foo']))
def resolvepath(self, path):
    """Expand placeholders in path and resolve it relative to this
    config's directory and optional basepath, normalized."""
    expanded = self.pc.expand(path, env=self.env)
    resolved = mozpath.join(
        mozpath.dirname(self.path),
        self.data.get('basepath', '.'),
        expanded)
    return mozpath.normpath(resolved)
def _files(self, matcher):
    """Yield the mocked paths under matcher.prefix accepted by matcher."""
    prefix = matcher.prefix
    for rel in self.mocks.get(prefix, []):
        full = mozpath.join(prefix, rel)
        if matcher.match(full):
            yield full
def ref(self):
    """Path of the reference fixture inside the temp directory."""
    filename = "en-reference" + self.extension
    return mozpath.join(self.tmp, filename)
def l10n(self):
    """Path of the localized fixture inside the temp directory."""
    filename = "l10n" + self.extension
    return mozpath.join(self.tmp, filename)
def setUp(self):
    """Make a temp working dir with an empty merge/ subdirectory."""
    self.maxDiff = None
    self.tmp = mkdtemp()
    os.mkdir(mozpath.join(self.tmp, "merge"))
def _files(self, matcher):
    """Yield files from the VCS manifest, rooted, that match matcher."""
    for entry in self.ctx.manifest():
        full = mozpath.join(self.root, entry)
        if matcher.match(full):
            yield full