def merge_two(comments, newer, older):
    diff = AddRemove()
    diff.set_left(newer.keys())
    diff.set_right(older.keys())

    def get_entity(key):
        entity = newer.get(key, None)
        # Always prefer the newer version.
        if entity is not None:
            return entity
        entity = older.get(key)
        # If it's an old comment attached to an entity, try to find that
        # entity in newer and return None to use its comment instead in prune.
        if isinstance(entity, cl.Comment) and entity in comments:
            next_entity = newer.get(comments[entity], None)
            if next_entity is not None and next_entity.pre_comment:
                # We'll prune this before returning the merged result.
                return None
        return entity

    # Create a flat sequence of all entities in order reported by AddRemove.
    contents = [(key, get_entity(key)) for _, key in diff]

    def prune(acc, cur):
        _, entity = cur
        if entity is None:
            # Prune Nones which stand for duplicated comments.
            return acc

        if len(acc) and isinstance(entity, cl.Whitespace):
            _, prev_entity = acc[-1]
            if isinstance(prev_entity, cl.Whitespace):
                # Prefer the longer whitespace.
                if len(entity.all) > len(prev_entity.all):
                    acc[-1] = (entity, entity)
                return acc

        acc.append(cur)
        return acc

    pruned = reduce(prune, contents, [])
    return OrderedDict(pruned)
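All of these snippets drive the same AddRemove helper: it lines up two key sequences and reports, in order, which keys match and which exist on only one side. The class itself isn't shown in this section; below is a minimal sketch built on difflib.SequenceMatcher, consistent with how the compare, compare_strings, and first diffLines snippets consume it ('equal' yields a left/right pair, 'add' and 'delete' yield a single key). Note that merge_two iterates `for _, key in diff`, which suggests its AddRemove variant yields the bare key for matches instead.

from difflib import SequenceMatcher

class AddRemove(SequenceMatcher):
    # Sketch only: the real class ships with compare-locales.
    def __init__(self):
        super().__init__(None, [], [])

    def set_left(self, left):
        self.set_seq1(list(left))

    def set_right(self, right):
        self.set_seq2(list(right))

    def __iter__(self):
        for tag, i1, i2, j1, j2 in self.get_opcodes():
            if tag == 'equal':
                # Keys present on both sides come out as (left, right) pairs.
                for pair in zip(self.a[i1:i2], self.b[j1:j2]):
                    yield ('equal', pair)
                continue
            # 'delete', 'insert' and 'replace' all decompose into
            # single-key 'delete' and 'add' events; one of the two
            # slices is empty for pure inserts or deletes.
            for item in self.a[i1:i2]:
                yield ('delete', item)
            for item in self.b[j1:j2]:
                yield ('add', item)

ar = AddRemove()
ar.set_left(['a', 'b', 'd'])
ar.set_right(['a', 'c', 'd'])
print(list(ar))
# [('equal', ('a', 'a')), ('delete', 'b'), ('add', 'c'), ('equal', ('d', 'd'))]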
def compare(self, locales):
    self.files()
    for locale in locales:
        l10n = self._files[locale]
        filecmp = AddRemove()
        filecmp.set_left(sorted(self._reference.keys()))
        filecmp.set_right(sorted(l10n.keys()))
        for op, item_or_pair in filecmp:
            if op == 'equal':
                self.watcher.compare(self._reference[item_or_pair[0]],
                                     l10n[item_or_pair[1]])
            elif op == 'add':
                # obsolete file
                self.watcher.remove(l10n[item_or_pair])
            else:
                # missing file
                _path = '.'.join([item_or_pair, locale, 'properties'])
                missingFile = File(
                    os.path.join(self.basedir, 'locales', _path),
                    'locales/' + _path)
                self.watcher.add(self._reference[item_or_pair],
                                 missingFile)
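For the missing-file branch, the reference key is turned into a locale-specific .properties path before the watcher is notified. A quick illustration with made-up values:

import os

# Hypothetical values mirroring the missing-file branch above.
basedir = '/src/myapp'
item_or_pair = 'browser'   # reference key reported by a 'delete' op
locale = 'de'

_path = '.'.join([item_or_pair, locale, 'properties'])
print(_path)                                    # browser.de.properties
print(os.path.join(basedir, 'locales', _path))  # /src/myapp/locales/browser.de.properties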
def compare_strings(self, reference, l10n, locale):
    add_remove = AddRemove()
    add_remove.set_left(sorted(reference.keys()))
    add_remove.set_right(sorted(l10n.keys()))
    missing = obsolete = changed = unchanged = 0
    for op, item_or_pair in add_remove:
        if op == 'equal':
            if reference[item_or_pair[0]] == l10n[item_or_pair[1]]:
                unchanged += 1
            else:
                changed += 1
        else:
            key = item_or_pair.replace('.AB_CD.', '.%s.' % locale)
            if op == 'add':
                # obsolete entry
                obsolete += 1
                self.watcher.notify('obsoleteEntity', self.file, key)
            else:
                # missing entry
                missing += 1
                self.watcher.notify('missingEntity', self.file, key)
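The `.AB_CD.` substitution rewrites the locale-neutral placeholder in a reference key into the concrete locale before notifying the watcher; for example (the key string here is made up):

locale = 'de'
key = 'browser.AB_CD.properties/findbar.title'.replace('.AB_CD.', '.%s.' % locale)
print(key)  # browser.de.properties/findbar.title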
def merge_two(comments, newer, older):
    diff = AddRemove()
    diff.set_left(newer.keys())
    diff.set_right(older.keys())

    def get_entity(key):
        entity = newer.get(key, None)
        # Always prefer the newer version.
        if entity is not None:
            return entity
        entity = older.get(key)
        # If it's an old comment attached to an entity, try to find that
        # entity in newer and return None to use its comment instead in prune.
        if isinstance(entity, cl.Comment) and entity in comments:
            next_entity = newer.get(comments[entity], None)
            if next_entity is not None and next_entity.pre_comment:
                # We'll prune this before returning the merged result.
                return None
        return entity

    # Create a flat sequence of all entities in order reported by AddRemove.
    contents = [(key, get_entity(key)) for _, key in diff]

    def prune(acc, cur):
        _, entity = cur
        if entity is None:
            # Prune Nones which stand for duplicated comments.
            return acc

        if len(acc) and isinstance(entity, cl.Whitespace):
            _, prev_entity = acc[-1]
            if isinstance(prev_entity, cl.Whitespace):
                # Prefer the longer whitespace.
                if len(entity.all) > len(prev_entity.all):
                    acc[-1] = (entity, entity)
                return acc

        acc.append(cur)
        return acc

    pruned = six.moves.reduce(prune, contents, [])
    return OrderedDict(pruned)
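The only change from the first merge_two above is six.moves.reduce: reduce stopped being a builtin in Python 3, so code that must run on both major versions pulls it from six (or, stdlib-only, from functools). A minimal sketch:

try:
    # Works on Python 2 and 3, but requires the six package.
    from six.moves import reduce
except ImportError:
    # Stdlib fallback; on Python 3, reduce lives in functools.
    from functools import reduce

# Same fold shape as prune: accumulate (key, entity) tuples into a list.
pairs = [('a', 1), ('b', 2)]
print(reduce(lambda acc, cur: acc + [cur], pairs, []))  # [('a', 1), ('b', 2)]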
def diffLines(self, path, action):
    lines = []
    try:
        p = getParser(path)
    except UserWarning:
        return None
    if action == 'added':
        a_entities = []
        a_map = {}
    else:
        realpath = (action == 'moved' and self.moved[path] or
                    action == 'copied' and self.copied[path] or
                    path)
        data = self.ctx1.filectx(realpath).data()
        data = self._universal_newlines(data)
        try:
            p.readContents(data)
            a_entities, a_map = p.parse()
        except Exception:
            # consider doing something like:
            # logging.warn('Unable to parse %s', path, exc_info=True)
            return None
    if action == 'removed':
        c_entities, c_map = [], {}
    else:
        data = self.ctx2.filectx(path).data()
        data = self._universal_newlines(data)
        try:
            p.readContents(data)
            c_entities, c_map = p.parse()
        except Exception:
            # consider doing something like:
            # logging.warn('Unable to parse %s', path, exc_info=True)
            return None
    a_list = sorted(a_map.keys())
    c_list = sorted(c_map.keys())
    ar = AddRemove()
    ar.set_left(a_list)
    ar.set_right(c_list)
    for action, item_or_pair in ar:
        if action == 'delete':
            lines.append({
                'class': 'removed',
                'oldval': [{'value': a_entities[a_map[item_or_pair]].val}],
                'newval': '',
                'entity': item_or_pair
            })
        elif action == 'add':
            lines.append({
                'class': 'added',
                'oldval': '',
                'newval': [{'value': c_entities[c_map[item_or_pair]].val}],
                'entity': item_or_pair
            })
        else:
            oldval = a_entities[a_map[item_or_pair[0]]].val
            newval = c_entities[c_map[item_or_pair[1]]].val
            if oldval == newval:
                continue
            sm = SequenceMatcher(None, oldval, newval)
            oldhtml = []
            newhtml = []
            for op, o1, o2, n1, n2 in sm.get_opcodes():
                if o1 != o2:
                    oldhtml.append({'class': op, 'value': oldval[o1:o2]})
                if n1 != n2:
                    newhtml.append({'class': op, 'value': newval[n1:n2]})
            lines.append({'class': 'changed',
                          'oldval': oldhtml,
                          'newval': newhtml,
                          'entity': item_or_pair[0]})
    return lines
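The realpath assignment uses Python's old `and/or` conditional idiom, which predates conditional expressions. Spelled out with explicit branches (the function wrapper is purely for illustration), it reads:

def resolve_realpath(action, path, moved, copied):
    # Explicit-branch equivalent of:
    #   action == 'moved' and moved[path] or
    #   action == 'copied' and copied[path] or path
    # (the and/or chain would also fall through to `path` if the
    # looked-up value were falsy, a known pitfall of this idiom).
    if action == 'moved':
        return moved[path]
    if action == 'copied':
        return copied[path]
    return path

print(resolve_realpath('moved', 'old.dtd', {'old.dtd': 'new.dtd'}, {}))  # new.dtd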
def diffLines(self, path, action):
    '''The actual l10n-aware diff for a particular file.

    If the file is not supported by compare-locales, return None.
    Use compare-locales to compare the old and new revisions,
    self.rev1 and self.rev2. Convert both into an OrderedDict of
    key to value, and create an inline diff for modified values.
    For Fluent attributes, concatenate key and attribute name
    with '.'. That's OK because Fluent IDs don't allow '.'.
    '''
    lines = []
    try:
        p = getParser(path)
    except UserWarning:
        return None
    old_translations = OrderedDict()
    if action != 'added':
        realpath = (action == 'moved' and self.moved[path] or
                    action == 'copied' and self.copied[path] or
                    path)
        content = self.content(realpath, self.rev1)
        try:
            old_translations.update(self.parse(p, content))
        except Exception:
            # consider doing something like:
            # logging.warn('Unable to parse %s', path, exc_info=True)
            return None
    new_translations = OrderedDict()
    if action != 'removed':
        content = self.content(path, self.rev2)
        try:
            new_translations.update(self.parse(p, content))
        except Exception:
            # consider doing something like:
            # logging.warn('Unable to parse %s', path, exc_info=True)
            return None
    ar = AddRemove()
    ar.set_left(old_translations.keys())
    ar.set_right(new_translations.keys())
    for action, key in ar:
        if action == 'delete':
            if old_translations[key] is None:
                continue
            lines.append({
                'class': 'removed',
                'oldval': [{'value': old_translations[key]}],
                'newval': '',
                'entity': key
            })
        elif action == 'add':
            if new_translations[key] is None:
                continue
            lines.append({
                'class': 'added',
                'oldval': '',
                'newval': [{'value': new_translations[key]}],
                'entity': key
            })
        else:
            old_value = old_translations[key]
            new_value = new_translations[key]
            if old_value != new_value:
                oldhtml, newhtml = \
                    self.diff_strings(old_value, new_value)
                lines.append({
                    'class': 'changed',
                    'oldval': oldhtml,
                    'newval': newhtml,
                    'entity': key
                })
    return lines
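This newer diffLines delegates the inline diff to self.diff_strings, which isn't shown in this section. Its body can be reconstructed from the older revision above, which inlines the same SequenceMatcher logic; a standalone sketch (the name matches the call site, the body mirrors the older snippet):

from difflib import SequenceMatcher

def diff_strings(oldval, newval):
    # Character-level opcodes between the old and new values; each
    # non-empty slice becomes a fragment tagged with its opcode
    # ('equal', 'replace', 'delete' or 'insert') for inline rendering.
    sm = SequenceMatcher(None, oldval, newval)
    oldhtml, newhtml = [], []
    for op, o1, o2, n1, n2 in sm.get_opcodes():
        if o1 != o2:
            oldhtml.append({'class': op, 'value': oldval[o1:o2]})
        if n1 != n2:
            newhtml.append({'class': op, 'value': newval[n1:n2]})
    return oldhtml, newhtml

print(diff_strings('colour', 'color'))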