def execute_slonik(script, sync=None, exit_on_fail=True, auto_preamble=True):
    """Use the slonik command line tool to run a slonik script.

    :param script: The script as a string. Preamble should not be included.

    :param sync: Number of seconds to wait for sync before failing. 0 to
        block indefinitely.

    :param exit_on_fail: If True, on failure of the slonik script
        SystemExit is raised using the slonik return code.

    :param auto_preamble: If True, the generated preamble will be
        automatically included.

    :returns: True if the script completed successfully. False if
        exit_on_fail is False and the script failed for any reason.
    """
    # Add the preamble and optional sync to the script.
    if auto_preamble:
        script = preamble() + script
    if sync is not None:
        sync_script = dedent("""\
            sync (id = @master_node);
            wait for event (
                origin = @master_node, confirmed = ALL,
                wait on = @master_node, timeout = %d);
            """ % sync)
        script = script + sync_script

    # Copy the script to a NamedTemporaryFile rather than just pumping it
    # to slonik via stdin. This way it can be examined if slonik appears
    # to hang.
    script_on_disk = NamedTemporaryFile(prefix="slonik", suffix=".sk")
    print >> script_on_disk, script
    script_on_disk.flush()

    # Run slonik.
    log.debug("Executing slonik script %s" % script_on_disk.name)
    log.log(DEBUG2, 'Running script:\n%s' % script)
    returncode = subprocess.call(['slonik', script_on_disk.name])

    if returncode != 0:
        log.error("slonik script failed")
        if exit_on_fail:
            # Propagate slonik's return code, as documented above.
            raise SystemExit(returncode)

    return returncode == 0
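# Hedged usage sketch for the helper above: run a tiny slonik script and
# wait for the cluster to sync. The script body, the timeout, and the idea
# of a dedicated health-check helper are illustrative assumptions, not real
# call sites; preamble() is assumed to define @master_node, as the sync
# fragment above already relies on it.
def check_cluster_is_in_sync():
    """Return True if a no-op slonik run syncs within two minutes."""
    script = dedent("""\
        echo 'checking that all nodes are caught up';
        """)
    # exit_on_fail=False so the caller decides how to react to failure.
    return execute_slonik(script, sync=120, exit_on_fail=False)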
def mergePOTMsgSets(self):
    """Merge POTMsgSets for given sequence of sharing templates."""
    subordinates, representative_templates = self._mapRepresentatives()

    num_representatives = len(subordinates)
    representative_num = 0

    for representative, potmsgsets in subordinates.iteritems():
        representative_num += 1
        log.debug("Message %d/%d: %d subordinate(s)." % (
            representative_num, num_representatives, len(potmsgsets)))

        seen_potmsgsets = set([representative.id])

        potmsgset_deletions = 0
        tm_deletions = 0

        # Merge each subordinate POTMsgSet into its representative.
        for subordinate in potmsgsets:
            if subordinate.id in seen_potmsgsets:
                continue
            seen_potmsgsets.add(subordinate.id)

            for message in subordinate.getAllTranslationMessages():
                message = removeSecurityProxy(message)

                clashing_current, clashing_imported, twin = (
                    self._findClashes(
                        message, representative, message.potemplate))

                if clashing_current or clashing_imported:
                    saved = self._saveByDiverging(
                        message, representative, subordinate)
                else:
                    saved = False

                if not saved:
                    if twin is None:
                        # This message will have to lose some flags, but
                        # then it can still move to the new potmsgset.
                        sacrifice_flags(
                            message,
                            (clashing_current, clashing_imported))
                        message.potmsgset = representative
                    else:
                        # This message is identical in contents to one
                        # that was more representative.  It'll have to
                        # die, but maybe it can bequeathe some of its
                        # status to the existing message.
                        # Since there are no clashes, there's no need to
                        # check for clashes with other current/imported
                        # messages in the target context.
                        bequeathe_flags(
                            message, twin,
                            (clashing_current, clashing_imported))
                        tm_deletions += 1

            merge_translationtemplateitems(
                subordinate, representative,
                representative_templates[representative])
            removeSecurityProxy(subordinate).destroySelf()
            potmsgset_deletions += 1

            self.tm.endTransaction(intermediate=True)

        report = "Deleted POTMsgSets: %d. TranslationMessages: %d." % (
            potmsgset_deletions, tm_deletions)
        if potmsgset_deletions > 0 or tm_deletions > 0:
            log.info(report)
        else:
            log.log(DEBUG2, report)
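# Illustrative decision summary (not the Launchpad implementation) of the
# inner loop above: a message with current/imported clashes is saved by
# diverging when possible; otherwise it either moves to the representative
# POTMsgSet (when it has no identical twin) or bequeathes its flags to the
# twin and is deleted. The names below are made up for the sketch.
def plan_message_merge(has_clash, saved_by_diverging, has_twin):
    if has_clash and saved_by_diverging:
        return "diverged"    # kept, but tied to its original template
    if not has_twin:
        return "moved"       # flags sacrificed, potmsgset repointed
    return "superseded"      # twin inherits flags, this message dies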
def main(self):
    actions = (
        self.options.remove_duplicates or
        self.options.merge_potmsgsets or
        self.options.merge_translationmessages)

    if not actions:
        raise LaunchpadScriptFailure(
            "Select at least one action: remove duplicates, merge "
            "POTMsgSets, and/or merge TranslationMessages.")

    if self.options.product and self.options.distribution:
        raise LaunchpadScriptFailure(
            "Merge a product or a distribution, but not both.")

    if not (self.options.product or self.options.distribution):
        raise LaunchpadScriptFailure(
            "Specify a product or distribution to merge.")

    if self.options.sourcepackage and not self.options.distribution:
        raise LaunchpadScriptFailure(
            "Selecting a package only makes sense for distributions.")

    if self.options.product:
        product = getUtility(IProductSet).getByName(self.options.product)
        distribution = None
        if product is None:
            raise LaunchpadScriptFailure(
                "Unknown product: '%s'" % self.options.product)
    else:
        product = None
        # import here to avoid circular import.
        from lp.registry.interfaces.distribution import IDistributionSet
        distribution = getUtility(IDistributionSet).getByName(
            self.options.distribution)
        if distribution is None:
            raise LaunchpadScriptFailure(
                "Unknown distribution: '%s'" % self.options.distribution)

    if self.options.sourcepackage is None:
        sourcepackagename = None
    else:
        sourcepackagename = getUtility(ISourcePackageNameSet).queryByName(
            self.options.sourcepackage)
        if sourcepackagename is None:
            raise LaunchpadScriptFailure(
                "Unknown source package name: '%s'"
                % self.options.sourcepackage)

    self._setUpUtilities()

    subset = self.template_set.getSharingSubset(
        product=product, distribution=distribution,
        sourcepackagename=sourcepackagename)
    equivalence_classes = subset.groupEquivalentPOTemplates(
        self.options.template_names)

    class_count = len(equivalence_classes)
    log.info("Merging %d template equivalence classes." % class_count)

    tm = TransactionManager(self.txn, self.options.dry_run)

    for number, name in enumerate(sorted(equivalence_classes.iterkeys())):
        templates = equivalence_classes[name]
        log.info(
            "Merging equivalence class '%s': %d template(s) (%d / %d)" % (
                name, len(templates), number + 1, class_count))
        log.debug("Templates: %s" % str(templates))

        merger = TranslationMerger(templates, tm)

        if self.options.remove_duplicates:
            log.info("Removing duplicate messages.")
            merger.removeDuplicateMessages()
            tm.endTransaction(intermediate=True)

        if self.options.merge_potmsgsets:
            log.info("Merging POTMsgSets.")
            merger.mergePOTMsgSets()
            tm.endTransaction(intermediate=True)

        if self.options.merge_translationmessages:
            log.info("Merging TranslationMessages.")
            merger.mergeTranslationMessages()

        tm.endTransaction()

    log.info("Done.")
def main(self):
    actions = (
        self.options.remove_duplicates or
        self.options.merge_potmsgsets or
        self.options.merge_translationmessages)

    if not actions:
        raise LaunchpadScriptFailure(
            "Select at least one action: remove duplicates, merge "
            "POTMsgSets, and/or merge TranslationMessages.")

    if self.options.product and self.options.distribution:
        raise LaunchpadScriptFailure(
            "Merge a product or a distribution, but not both.")

    if not (self.options.product or self.options.distribution):
        raise LaunchpadScriptFailure(
            "Specify a product or distribution to merge.")

    if self.options.sourcepackage and not self.options.distribution:
        raise LaunchpadScriptFailure(
            "Selecting a package only makes sense for distributions.")

    if self.options.product:
        product = getUtility(IProductSet).getByName(self.options.product)
        distribution = None
        if product is None:
            raise LaunchpadScriptFailure(
                "Unknown product: '%s'" % self.options.product)
    else:
        product = None
        # import here to avoid circular import.
        from lp.registry.interfaces.distribution import IDistributionSet
        distribution = getUtility(IDistributionSet).getByName(
            self.options.distribution)
        if distribution is None:
            raise LaunchpadScriptFailure(
                "Unknown distribution: '%s'" % self.options.distribution)

    if self.options.sourcepackage is None:
        sourcepackagename = None
    else:
        sourcepackagename = getUtility(ISourcePackageNameSet).queryByName(
            self.options.sourcepackage)
        if sourcepackagename is None:
            raise LaunchpadScriptFailure(
                "Unknown source package name: '%s'"
                % self.options.sourcepackage)

    self._setUpUtilities()

    subset = self.template_set.getSharingSubset(
        product=product, distribution=distribution,
        sourcepackagename=sourcepackagename)

    template_regex = self.options.template_names
    if template_regex is not None:
        template_regex = six.ensure_text(template_regex)
    equivalence_classes = subset.groupEquivalentPOTemplates(template_regex)

    class_count = len(equivalence_classes)
    log.info("Merging %d template equivalence classes." % class_count)

    tm = TransactionManager(self.txn, self.options.dry_run)

    for number, name in enumerate(sorted(equivalence_classes.iterkeys())):
        templates = equivalence_classes[name]
        log.info(
            "Merging equivalence class '%s': %d template(s) (%d / %d)" % (
                name, len(templates), number + 1, class_count))
        log.debug("Templates: %s" % str(templates))

        merger = TranslationMerger(templates, tm)

        if self.options.remove_duplicates:
            log.info("Removing duplicate messages.")
            merger.removeDuplicateMessages()
            tm.endTransaction(intermediate=True)

        if self.options.merge_potmsgsets:
            log.info("Merging POTMsgSets.")
            merger.mergePOTMsgSets()
            tm.endTransaction(intermediate=True)

        if self.options.merge_translationmessages:
            log.info("Merging TranslationMessages.")
            merger.mergeTranslationMessages()

        tm.endTransaction()

    log.info("Done.")
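# A minimal sketch, assuming the TransactionManager used above simply skips
# writes in dry-run mode and otherwise commits at each checkpoint so a long
# merge never holds one huge transaction open. The class below is an
# illustration only, not the real Launchpad implementation.
class SketchTransactionManager:
    def __init__(self, txn, dry_run):
        self.txn = txn
        self.dry_run = dry_run

    def endTransaction(self, intermediate=False):
        # `intermediate` marks a mid-run checkpoint as opposed to the final
        # commit at the end of an equivalence class.
        if self.dry_run:
            self.txn.abort()
        else:
            self.txn.commit()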
def calculate_replication_set(cur, seeds):
    """Return the minimal set of tables and sequences needed in a
    replication set containing the seed tables.

    A replication set must contain all tables linked by foreign key
    reference to the given table, and sequences used to generate keys.
    Tables and sequences can be added to the IGNORED_TABLES and
    IGNORED_SEQUENCES lists for cases where we know we can safely ignore
    this restriction.

    :param seeds: [(namespace, tablename), ...]

    :returns: (tables, sequences)
    """
    # Results
    tables = set()
    sequences = set()

    # Our pending set to check
    pending_tables = set(seeds)

    # Generate the set of tables that reference the seed directly
    # or indirectly via foreign key constraints, including the seed itself.
    while pending_tables:
        namespace, tablename = pending_tables.pop()

        # Skip if the table doesn't exist - we might have seeds listed that
        # have been removed or are yet to be created.
        cur.execute("""
            SELECT TRUE
            FROM pg_class, pg_namespace
            WHERE
                pg_class.relnamespace = pg_namespace.oid
                AND pg_namespace.nspname = %s
                AND pg_class.relname = %s
            """ % sqlvalues(namespace, tablename))
        if cur.fetchone() is None:
            log.debug("Table %s.%s doesn't exist" % (namespace, tablename))
            continue

        tables.add((namespace, tablename))

        # Find all tables that reference the current (seed) table
        # and all tables that the seed table references.
        cur.execute("""
            SELECT ref_namespace.nspname, ref_class.relname
            FROM
                -- One of the seed tables
                pg_class AS seed_class,
                pg_namespace AS seed_namespace,

                -- A table referencing the seed, or being referenced by
                -- the seed.
                pg_class AS ref_class,
                pg_namespace AS ref_namespace,

                pg_constraint
            WHERE
                seed_class.relnamespace = seed_namespace.oid
                AND ref_class.relnamespace = ref_namespace.oid

                AND seed_namespace.nspname = %s
                AND seed_class.relname = %s

                -- Foreign key constraints are all we care about.
                AND pg_constraint.contype = 'f'

                -- We want tables referenced by, or referred to, the
                -- seed table.
                AND ((pg_constraint.conrelid = ref_class.oid
                        AND pg_constraint.confrelid = seed_class.oid)
                    OR (pg_constraint.conrelid = seed_class.oid
                        AND pg_constraint.confrelid = ref_class.oid)
                    )
            """ % sqlvalues(namespace, tablename))
        for namespace, tablename in cur.fetchall():
            key = (namespace, tablename)
            if (key not in tables and key not in pending_tables
                    and '%s.%s' % (namespace, tablename)
                        not in IGNORED_TABLES):
                pending_tables.add(key)

    # Generate the set of sequences that are linked to any of our set of
    # tables. We assume these are all sequences created by creation of
    # serial or bigserial columns, or other sequences OWNED BY a particular
    # column.
    for namespace, tablename in tables:
        cur.execute("""
            SELECT seq
            FROM (
                SELECT pg_get_serial_sequence(%s, attname) AS seq
                FROM pg_namespace, pg_class, pg_attribute
                WHERE pg_namespace.nspname = %s
                    AND pg_class.relnamespace = pg_namespace.oid
                    AND pg_class.relname = %s
                    AND pg_attribute.attrelid = pg_class.oid
                    AND pg_attribute.attisdropped IS FALSE
                ) AS whatever
            WHERE seq IS NOT NULL;
            """ % sqlvalues(fqn(namespace, tablename), namespace, tablename))
        for sequence, in cur.fetchall():
            if sequence not in IGNORED_SEQUENCES:
                sequences.add(sequence)

    # We can't easily convert the sequence name to (namespace, name) tuples,
    # so we might as well convert the tables to dot notation for consistency.
    tables = set(
        fqn(namespace, tablename) for namespace, tablename in tables)

    return tables, sequences
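# Hedged usage sketch: compute the closure of tables and sequences reachable
# from two seed tables. The connection string and seed names are
# illustrative assumptions, not values taken from this module.
import psycopg2

con = psycopg2.connect("dbname=launchpad_dev")
cur = con.cursor()
seeds = [('public', 'person'), ('public', 'teamparticipation')]
tables, sequences = calculate_replication_set(cur, seeds)
print("Tables: %s" % ", ".join(sorted(tables)))
print("Sequences: %s" % ", ".join(sorted(sequences)))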