def mergeTranslationMessages(self):
    """Share `TranslationMessage`s between templates where possible.

    Iterates over `self.potemplates` (asserting through `OrderingCheck`
    that they are sorted from most to least representative by
    `sharingKey`) and gives every `TranslationMessage` under each
    template the chance to merge with an equivalent message.  Work is
    committed in intermediate transactions, and the number of messages
    that disappeared through sharing is reported per template.
    """
    order_check = OrderingCheck(
        key=methodcaller('sharingKey'), reverse=True)
    for template_number, template in enumerate(self.potemplates):
        log.info("Merging template %d/%d." % (
            template_number + 1, len(self.potemplates)))
        deletions = 0
        order_check.check(template)
        for potmsgset_id in self._getPOTMsgSetIds(template):
            potmsgset = POTMsgSet.get(potmsgset_id)
            tm_ids = self._partitionTranslationMessageIds(potmsgset)
            # Count messages before sharing; any reduction afterwards
            # means duplicates were merged away.
            before = sum(len(sublist) for sublist in tm_ids)
            for ids in tm_ids:
                # "tm_id," not "id": avoid shadowing the builtin.
                for tm_id in ids:
                    message = TranslationMessage.get(tm_id)
                    removeSecurityProxy(message).shareIfPossible()
            self.tm.endTransaction(intermediate=True)
            after = potmsgset.getAllTranslationMessages().count()
            deletions += max(0, before - after)
        report = "Deleted TranslationMessages: %d." % deletions
        if deletions > 0:
            log.info(report)
        else:
            log.log(DEBUG2, report)
def mergeTranslationMessages(self):
    """Share `TranslationMessage`s between templates where possible."""
    order_check = OrderingCheck(
        key=methodcaller('sharingKey'), reverse=True)
    template_count = len(self.potemplates)
    for index, template in enumerate(self.potemplates):
        log.info("Merging template %d/%d." % (index + 1, template_count))
        deleted_here = 0
        order_check.check(template)
        for potmsgset_id in self._getPOTMsgSetIds(template):
            potmsgset = POTMsgSet.get(potmsgset_id)
            partitioned_ids = self._partitionTranslationMessageIds(
                potmsgset)
            # Message count before sharing, for the deletion tally.
            messages_before = sum(
                [len(sublist) for sublist in partitioned_ids], 0)
            for id_group in partitioned_ids:
                for message_id in id_group:
                    message = TranslationMessage.get(message_id)
                    removeSecurityProxy(message).shareIfPossible()
            self.tm.endTransaction(intermediate=True)
            messages_after = (
                potmsgset.getAllTranslationMessages().count())
            deleted_here += max(0, messages_before - messages_after)
        report = "Deleted TranslationMessages: %d." % deleted_here
        if deleted_here > 0:
            log.info(report)
        else:
            log.log(DEBUG2, report)
def _removeDuplicateMessages(self):
    """Get rid of duplicate `TranslationMessages` where needed."""
    # Per potmsgset key, remember the id of the first (and therefore
    # most representative) POTMsgSet encountered.
    representative_ids = {}
    order_check = OrderingCheck(
        key=methodcaller('sharingKey'), reverse=True)
    for template in self.potemplates:
        order_check.check(template)
        for potmsgset in template.getPOTMsgSets(False, prefetch=False):
            representative_ids.setdefault(
                get_potmsgset_key(potmsgset), potmsgset.id)
    self.tm.endTransaction(intermediate=True)
    # Scrub translations for every representative, committing
    # intermediate transactions as we go.
    for potmsgset_id in representative_ids.itervalues():
        self._scrubPOTMsgSetTranslations(POTMsgSet.get(potmsgset_id))
        self.tm.endTransaction(intermediate=True)
def _removeDuplicateMessages(self):
    """Get rid of duplicate `TranslationMessages` where needed."""
    ordering = OrderingCheck(
        key=methodcaller('sharingKey'), reverse=True)
    # The first POTMsgSet seen for a key (templates are walked from
    # most to least representative) is that key's representative.
    first_seen = {}
    for template in self.potemplates:
        ordering.check(template)
        for candidate in template.getPOTMsgSets(False, prefetch=False):
            candidate_key = get_potmsgset_key(candidate)
            if candidate_key not in first_seen:
                first_seen[candidate_key] = candidate.id
    self.tm.endTransaction(intermediate=True)
    for msgset_id in first_seen.itervalues():
        representative = POTMsgSet.get(msgset_id)
        self._scrubPOTMsgSetTranslations(representative)
        self.tm.endTransaction(intermediate=True)
def _mapRepresentatives(self):
    """Map out POTMsgSets' subordinates and templates.

    :return: A tuple of dicts.  The first maps each representative
        `POTMsgSet` to a list of the subordinate `POTMsgSet`s it
        represents (the representative itself is not in the list).
        The second maps each representative `POTMsgSet` to its
        representative `POTemplate`.
    """
    # Map each POTMsgSet key (context, msgid, plural) to its
    # representative POTMsgSet.
    representatives = {}
    # Map each representative POTMsgSet to a list of subordinate
    # POTMsgSets it represents.
    subordinates = {}
    # Map each representative POTMsgSet to its representative
    # POTemplate.
    representative_templates = {}
    # Figure out representative potmsgsets and their subordinates.  Go
    # through the templates, starting at the most representative and
    # moving towards the least representative.  For any unique
    # potmsgset key we find, the first POTMsgSet is the representative
    # one.
    order_check = OrderingCheck(
        key=methodcaller('sharingKey'), reverse=True)
    for template in self.potemplates:
        order_check.check(template)
        for potmsgset in template.getPOTMsgSets(False, prefetch=False):
            key = get_potmsgset_key(potmsgset)
            representative = representatives.get(key)
            if representative is None:
                # First occurrence of this key: potmsgset becomes its
                # representative, with no subordinates yet.
                representatives[key] = potmsgset
                representative_templates[potmsgset] = template
                subordinates[potmsgset] = []
            else:
                subordinates[representative].append(potmsgset)
    return subordinates, representative_templates
def _mapRepresentatives(self):
    """Map out POTMsgSets' subordinates and templates.

    Walks `self.potemplates` from most to least representative (order
    is asserted via `OrderingCheck`).  The first `POTMsgSet` seen for a
    given key (context, msgid, plural) becomes that key's
    representative; later ones with the same key are its subordinates.

    :return: A tuple of two dicts: representative `POTMsgSet` to list
        of subordinate `POTMsgSet`s, and representative `POTMsgSet` to
        its representative `POTemplate`.
    """
    representative_by_key = {}
    subordinate_lists = {}
    template_of = {}
    ordering = OrderingCheck(
        key=methodcaller('sharingKey'), reverse=True)
    for template in self.potemplates:
        ordering.check(template)
        for msgset in template.getPOTMsgSets(False, prefetch=False):
            msgset_key = get_potmsgset_key(msgset)
            if msgset_key in representative_by_key:
                # Key already represented: msgset is a subordinate.
                subordinate_lists[
                    representative_by_key[msgset_key]].append(msgset)
            else:
                # New key: msgset represents it, and has no
                # subordinates yet.
                representative_by_key[msgset_key] = msgset
                template_of[msgset] = template
                subordinate_lists[msgset] = []
    return subordinate_lists, template_of
def dominatePackage(self, sorted_pubs, live_versions, generalization):
    """Dominate publications for a single package.

    The latest publication for each version in `live_versions` remains
    active.  Older publications (including extra, older publications of
    a live version) are marked as superseded by the oldest live release
    that is newer than them.  Versions newer than anything in
    `live_versions` are marked as deleted; that should be impossible in
    Soyuz-native archives, but happens during archive imports when the
    previous latest version of a package has disappeared from the
    Sources list we import.

    :param sorted_pubs: A list of publications for the same package, in
        the same archive, series, and pocket, all with status
        `PackagePublishingStatus.PUBLISHED`, sorted from most current
        to least current as by `generalization.sortPublications`.
    :param live_versions: Iterable of versions still considered "live"
        for this package.  The latest publication of each stays
        Published; older releases and older publications of live
        versions become Superseded; versions newer than any listed here
        become Deleted.
    :param generalization: A `GeneralizedPublication` helper
        representing the kind of publications these are: source or
        binary.
    """
    live_versions = frozenset(live_versions)
    self.logger.debug(
        "Package has %d live publication(s). Live versions: %s",
        len(sorted_pubs), live_versions)
    # Assert that the publications really arrive newest-first.
    ordering = OrderingCheck(cmp=generalization.compare, reverse=True)
    reigning_pub = None
    reigning_version = None
    for publication in sorted_pubs:
        ordering.check(publication)
        version = generalization.getPackageVersion(publication)
        # Two Published releases never share a version, so it makes no
        # difference whether this comparison is a string comparison or
        # a version comparison: equal either way means the same
        # release.
        if version == reigning_version:
            # A live version, but an older publication of it; it is
            # superseded by the newer publication of the same version.
            publication.supersede(reigning_pub, logger=self.logger)
            self.logger.debug2(
                "Superseding older publication for version %s.",
                version)
        elif version in live_versions:
            # This one stays active, and becomes the release that
            # supersedes any older publications that follow.
            reigning_pub = publication
            reigning_version = version
            self.logger.debug2("Keeping version %s.", version)
        elif reigning_pub is None:
            # No longer live, yet nothing newer supersedes it either:
            # it must be deleted.
            publication.requestDeletion(None)
            self.logger.debug2("Deleting version %s.", version)
        else:
            # The ordinary case: superseded by the reigning release.
            publication.supersede(reigning_pub, logger=self.logger)
            self.logger.debug2("Superseding version %s.", version)
def dominatePackage(self, sorted_pubs, live_versions, generalization,
                    immutable_check=True):
    """Dominate publications for a single package.

    The latest publication for any version in `live_versions` stays
    active.  Any older publications (including older publications for
    live versions with multiple publications) are marked as superseded
    by the respective oldest live releases that are newer than the
    superseded ones.  Any versions that are newer than anything in
    `live_versions` are marked as deleted.  This should not be possible
    in Soyuz-native archives, but it can happen during archive imports
    when the previous latest version of a package has disappeared from
    the Sources list we import.

    :param sorted_pubs: A list of publications for the same package, in
        the same archive, series, and pocket, all with status
        `PackagePublishingStatus.PUBLISHED`.  They must be sorted from
        most current to least current, as would be the result of
        `generalization.sortPublications`.
    :param live_versions: Iterable of versions that are still
        considered "live" for this package.  For any of these, the
        latest publication among `publications` will remain Published.
        Publications for older releases, as well as older publications
        of live versions, will be marked as Superseded.  Publications
        of newer versions than are listed in `live_versions` are marked
        as Deleted.
    :param generalization: A `GeneralizedPublication` helper
        representing the kind of publications these are: source or
        binary.
    :param immutable_check: Passed on to `requestDeletion` for any
        publication that must be deleted outright.
    """
    live_versions = frozenset(live_versions)
    self.logger.debug(
        "Package has %d live publication(s). Live versions: %s",
        len(sorted_pubs), live_versions)
    # Verify that the publications are really sorted properly.
    check_order = OrderingCheck(
        cmp=generalization.compare, reverse=True)
    current_dominant = None
    dominant_version = None
    for pub in sorted_pubs:
        check_order.check(pub)
        version = generalization.getPackageVersion(pub)
        # There should never be two published releases with the same
        # version.  So it doesn't matter whether this comparison is
        # really a string comparison or a version comparison: if the
        # versions are equal by either measure, they're from the same
        # release.
        if version == dominant_version:
            # This publication is for a live version, but has been
            # superseded by a newer publication of the same version.
            # Supersede it.
            pub.supersede(current_dominant, logger=self.logger)
            self.logger.debug2(
                "Superseding older publication for version %s.",
                version)
        elif version in live_versions:
            # This publication stays active; if any publications
            # that follow right after this are to be superseded,
            # this is the release that they are superseded by.
            current_dominant = pub
            dominant_version = version
            self.logger.debug2("Keeping version %s.", version)
        elif current_dominant is None:
            # This publication is no longer live, but there is no
            # newer version to supersede it either.  Therefore it
            # must be deleted.
            pub.requestDeletion(None, immutable_check=immutable_check)
            self.logger.debug2("Deleting version %s.", version)
        else:
            # This publication is superseded.  This is what we're
            # here to do.
            pub.supersede(current_dominant, logger=self.logger)
            self.logger.debug2("Superseding version %s.", version)