Example 1
    def test_items(self):
        things = [KeyedThing('one', 'thing'), KeyedThing('two', 'things')]
        things.extend(things)
        keyedtuple = KeyedTuple(things)
        self.assertEqual(len(keyedtuple), 4)
        items = list(keyedtuple.items())
        self.assertEqual(len(items), 4)
        self.assertEqual(keyedtuple, tuple(v for k, v in items))
        self.assertEqual((
            'one',
            'two',
            'one',
            'two',
        ), tuple(k for k, v in items))
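The KeyedThing helper used throughout these tests is not shown in this listing. A minimal stand-in, assuming only that KeyedTuple looks elements up by their key attribute, could look like this (a sketch, not the project's actual test fixture):

from collections import namedtuple

# Hypothetical stand-in for the test helper: any object exposing a
# `.key` attribute works, since KeyedTuple indexes its items by key.
KeyedThing = namedtuple('KeyedThing', ['key', 'val'])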
Example 2
    def test_getitem(self):
        things = [KeyedThing('one', 'thing'), KeyedThing('two', 'things')]
        keyedtuple = KeyedTuple(things)
        self.assertEqual(keyedtuple[0], things[0])
        self.assertEqual(keyedtuple[1], things[1])
        self.assertEqual(keyedtuple['one'], things[0])
        self.assertEqual(keyedtuple['two'], things[1])
Example 3
def run_checks(entity, locale_code, string):
    """
    Run all compare-locales checks on the provided translation and entity.

    :arg pontoon.base.models.Entity entity: Source entity instance
    :arg basestring locale_code: Locale of the translation
    :arg basestring string: Translation string

    :return: Dictionary with the following structure:
        {
            'clErrors': [
                'Error1',
            ],
            'clWarnings': [
                'Warning1',
            ]
        }
        Both keys are optional.
    """
    resource_ext = ".{}".format(entity.resource.format)
    extra_tests = None

    if "mobile/android/base" in entity.resource.path:
        extra_tests = {"android-dtd"}
        entity.string = escape_quotes(entity.string)
        string = escape_quotes(string)

    source_ent, translation_ent = cast_to_compare_locales(
        resource_ext,
        entity,
        string,
    )

    checker = getChecker(
        File(entity.resource.path, entity.resource.path, locale=locale_code),
        extra_tests,
    )
    if checker is None:
        # compare-locales has no checks for this format; that's OK.
        return {}

    # Currently, references are required only by DTD files but that may change in the future.
    if checker.needs_reference:
        references = KeyedTuple(
            CompareDTDEntity(
                e.key,
                e.string,
                e.comment,
            ) for e in entity.resource.entities.all())
        checker.set_reference(references)

    errors = {}

    for severity, _, message, _ in checker.check(source_ent, translation_ent):
        messages = errors.setdefault("cl%ss" % severity.capitalize(), [])
        # Old-school duplicate prevention - set() is not JSON serializable
        if message not in messages:
            messages.append(message)

    return errors
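A call site for run_checks might look like the sketch below; entity and the translation text are placeholders rather than objects from this listing, and both result keys are optional, as the docstring notes:

# Hypothetical caller; `entity` stands in for a pontoon.base.models.Entity
# instance and is not defined in this listing.
check_results = run_checks(entity, 'de', 'Translated text')

# Both keys are optional, so read them with a default.
for error in check_results.get('clErrors', []):
    print('error:', error)
for warning in check_results.get('clWarnings', []):
    print('warning:', warning)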
Example 4
    def remove(self, ref_file, l10n, merge_file):
        '''Obsolete l10n file.

        Copy to merge stage if we can.
        '''
        self.observers.notify('obsoleteFile', l10n, None)
        self.merge(KeyedTuple([]), ref_file, l10n, merge_file, [], [], None,
                   parser.CAN_COPY, None)
Example 5
    def test_contains(self):
        things = [KeyedThing('one', 'thing'), KeyedThing('two', 'things')]
        keyedtuple = KeyedTuple(things)
        self.assertNotIn(1, keyedtuple)
        self.assertIn('one', keyedtuple)
        self.assertIn(things[0], keyedtuple)
        self.assertIn(things[1], keyedtuple)
        self.assertNotIn(KeyedThing('three', 'stooges'), keyedtuple)
Example 6
    def add(self, orig, missing, merge_file):
        ''' Add missing localized file.'''
        f = orig
        try:
            p = parser.getParser(f.file)
        except UserWarning:
            p = None

        # if we don't support this file, assume CAN_COPY to mimic the
        # l10n dir as closely as possible
        caps = p.capabilities if p else parser.CAN_COPY
        if (caps & (parser.CAN_COPY | parser.CAN_MERGE)):
            # even if we can merge, pretend we can only copy
            self.merge(KeyedTuple([]), orig, missing, merge_file,
                       ['trigger copy'], [], None, parser.CAN_COPY, None)

        if self.observers.notify('missingFile', missing, None) == "ignore":
            # filter said that we don't need this file, don't count it
            return

        if p is None:
            # We don't have a parser, cannot count missing strings
            return

        try:
            p.readFile(f)
            entities = p.parse()
        except Exception as ex:
            self.observers.notify('error', f, str(ex))
            return
        # strip parse errors
        entities = [e for e in entities if not isinstance(e, parser.Junk)]
        self.observers.updateStats(missing, {'missing': len(entities)})
        missing_w = 0
        for e in entities:
            missing_w += e.count_words()
        self.observers.updateStats(missing, {'missing_w': missing_w})
Example 7
    def compare(self, ref_file, l10n, merge_file, extra_tests=None):
        try:
            p = parser.getParser(ref_file.file)
        except UserWarning:
            # no comparison, XXX report?
            # At least, merge
            self.merge(KeyedTuple([]), ref_file, l10n, merge_file, [], [],
                       None, parser.CAN_COPY, None)
            return
        try:
            p.readFile(ref_file)
        except Exception as e:
            self.observers.notify('error', ref_file, str(e))
            return
        ref_entities = p.parse()
        try:
            p.readFile(l10n)
            l10n_entities = p.parse()
            l10n_ctx = p.ctx
        except Exception as e:
            self.observers.notify('error', l10n, str(e))
            return

        ar = AddRemove()
        ar.set_left(ref_entities.keys())
        ar.set_right(l10n_entities.keys())
        report = missing = obsolete = changed = unchanged = keys = 0
        missing_w = changed_w = unchanged_w = 0  # word stats
        missings = []
        skips = []
        checker = getChecker(l10n, extra_tests=extra_tests)
        if checker and checker.needs_reference:
            checker.set_reference(ref_entities)
        for msg in p.findDuplicates(ref_entities):
            self.observers.notify('warning', l10n, msg)
        for msg in p.findDuplicates(l10n_entities):
            self.observers.notify('error', l10n, msg)
        for action, entity_id in ar:
            if action == 'delete':
                # missing entity
                if isinstance(ref_entities[entity_id], parser.Junk):
                    self.observers.notify('warning', l10n,
                                          'Parser error in en-US')
                    continue
                _rv = self.observers.notify('missingEntity', l10n, entity_id)
                if _rv == "ignore":
                    continue
                if _rv == "error":
                    # only add to missing entities for l10n-merge on error,
                    # not report
                    missings.append(entity_id)
                    missing += 1
                    refent = ref_entities[entity_id]
                    missing_w += refent.count_words()
                else:
                    # just report
                    report += 1
            elif action == 'add':
                # obsolete entity or junk
                if isinstance(l10n_entities[entity_id], parser.Junk):
                    junk = l10n_entities[entity_id]
                    self.observers.notify('error', l10n, junk.error_message())
                    if merge_file is not None:
                        skips.append(junk)
                elif (self.observers.notify('obsoleteEntity', l10n, entity_id)
                      != 'ignore'):
                    obsolete += 1
            else:
                # entity found in both ref and l10n, check for changed
                refent = ref_entities[entity_id]
                l10nent = l10n_entities[entity_id]
                if self.keyRE.search(entity_id):
                    keys += 1
                else:
                    if refent.equals(l10nent):
                        self.doUnchanged(l10nent)
                        unchanged += 1
                        unchanged_w += refent.count_words()
                    else:
                        self.doChanged(ref_file, refent, l10nent)
                        changed += 1
                        changed_w += refent.count_words()
                # run checks:
                if checker:
                    for tp, pos, msg, cat in checker.check(refent, l10nent):
                        if isinstance(pos, EntityPos):
                            line, col = l10nent.position(pos)
                        else:
                            line, col = l10nent.value_position(pos)
                        # skip error entities when merging
                        if tp == 'error' and merge_file is not None:
                            skips.append(l10nent)
                        self.observers.notify(
                            tp, l10n, u"%s at line %d, column %d for %s" %
                            (msg, line, col, refent.key))

        if merge_file is not None:
            self.merge(ref_entities, ref_file, l10n, merge_file, missings,
                       skips, l10n_ctx, p.capabilities, p.encoding)

        stats = {
            'missing': missing,
            'missing_w': missing_w,
            'report': report,
            'obsolete': obsolete,
            'changed': changed,
            'changed_w': changed_w,
            'unchanged': unchanged,
            'unchanged_w': unchanged_w,
            'keys': keys,
        }
        self.observers.updateStats(l10n, stats)
Example 8
    def test_constructor(self):
        keyedtuple = KeyedTuple([])
        self.assertEqual(keyedtuple, tuple())
Example 9
    def parse(self):
        return KeyedTuple(self)
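Example 7 consumes the result of parse() both by position and by entity key (ref_entities[entity_id], ref_entities.keys()). Reusing the hypothetical KeyedThing stand-in from the note after Example 1, and assuming keys() preserves insertion order as the items() test suggests, a self-contained sketch of that dual access might be:

# Build a KeyedTuple directly, mirroring what a parser's parse() returns.
entities = KeyedTuple([
    KeyedThing('app.title', 'Title'),
    KeyedThing('app.body', 'Body'),
])

assert entities[0].val == 'Title'           # positional access, like a tuple
assert entities['app.body'].val == 'Body'   # key-based access, as in Example 7
assert list(entities.keys()) == ['app.title', 'app.body']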