def _pourTable(
    self, holding_table, table, has_new_id, transaction_manager):
    """Pour contents of a holding table back into its source table.

    This will commit transaction_manager, typically multiple times.
    """
    if has_new_id:
        # Update ids in holding table from originals to copies.  To
        # broaden the caller's opportunity to manipulate rows in the
        # holding tables, we skip rows that have new_id set to null.
        cur = cursor()
        cur.execute("UPDATE %s SET id=new_id" % holding_table)
        # Restore table to original schema.
        cur.execute(
            "ALTER TABLE %s DROP COLUMN new_id" % holding_table)
        self._commit(transaction_manager)
        self.logger.debug("...rearranged ids...")

    callback = self.pre_pouring_callbacks.get(table)
    if callback is not None:
        callback(holding_table, table)

    # Now pour the holding table's data into its source table.  This is
    # where we start writing to tables that other clients will be
    # reading, and our transaction will usually be serializable, so row
    # locks are a concern.  Break the writes up into batches of at least
    # a thousand rows.  The goal is to have these transactions running
    # no longer than five seconds or so each; we aim for four just to
    # be sure.
    pourer = PouringLoop(
        holding_table, table, transaction_manager, self.logger,
        self.batch_pouring_callbacks.get(table))
    DBLoopTuner(
        pourer, self.seconds_per_batch, self.minimum_batch_size).run()
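In every snippet here, DBLoopTuner drives an object that exposes the tunable-loop contract: isDone() to report whether work remains, and __call__(chunk_size) to process roughly chunk_size items, with the tuner growing or shrinking chunk_size so each call lands near the requested duration. The sketch below only illustrates that contract; the class name, SQL, and commit placement are assumptions for illustration, not the real PouringLoop.

class PouringLoopSketch:
    """Illustrative tunable loop; not the real PouringLoop."""

    def __init__(self, holding_table, table, transaction_manager, logger):
        self.holding_table = holding_table
        self.table = table
        self.transaction_manager = transaction_manager
        self.logger = logger

    def isDone(self):
        # Finished once the holding table has been drained.
        cur = cursor()
        cur.execute(
            "SELECT EXISTS (SELECT 1 FROM %s)" % self.holding_table)
        return not cur.fetchone()[0]

    def __call__(self, chunk_size):
        # Move roughly chunk_size rows, then commit so the row locks we
        # took are released before the next batch starts.  This assumes
        # the holding table's columns line up with the source table's.
        cur = cursor()
        cur.execute("""
            WITH moved AS (
                DELETE FROM %s
                WHERE id IN (SELECT id FROM %s ORDER BY id LIMIT %d)
                RETURNING *)
            INSERT INTO %s SELECT * FROM moved
            """ % (self.holding_table, self.holding_table,
                   int(chunk_size), self.table))
        self.transaction_manager.commit()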
def run(self):
    products_with_templates = list(self.getProductsWithTemplates())
    total_products = len(products_with_templates)
    if total_products == 0:
        self.logger.info("Nothing to do.")
    current_product = 0
    for product in products_with_templates:
        current_product += 1
        self.logger.info(
            "Migrating %s translations (%d of %d)..." % (
                product.name, current_product, total_products))
        tm_ids = self.getCurrentNonimportedTranslations(product)
        tm_loop = TranslationMessageImportedFlagUpdater(
            self.transaction, self.logger, tm_ids)
        DBLoopTuner(tm_loop, 5, minimum_chunk_size=100).run()

    self.logger.info("Done.")
def run(self):
    self.logger.info(
        "Starting verification of POFile stats at id %d"
        % self.start_at_id)
    loop = Verifier(self.transaction, self.logger, self.start_at_id)

    # Since the script can run for a long time, our deployment process
    # might remove the Launchpad tree the script was run from while it
    # is still going; if we looked up the email template only after the
    # DBLoopTuner run completed, it would no longer be found.  Load it
    # up front instead.  See bug #811447 for the OOPS this used to
    # cause.
    template = get_email_template('pofile-stats.txt', 'translations')

    # Each iteration of our loop collects all statistics first, before
    # modifying any rows in the database.  With any locks on the
    # database acquired only at the very end of the iteration, we can
    # afford to make relatively long, low-overhead iterations without
    # disrupting application response times.
    iteration_duration = (
        config.rosetta_pofile_stats.looptuner_iteration_duration)
    DBLoopTuner(loop, iteration_duration).run()

    if loop.total_incorrect > 0 or loop.total_exceptions > 0:
        # Not all statistics were correct, or there were failures while
        # checking them.  Email the admins.
        message = template % {
            'exceptions': loop.total_exceptions,
            'errors': loop.total_incorrect,
            'total': loop.total_checked,
            }
        simple_sendmail(
            from_addr=config.canonical.noreply_from_address,
            to_addrs=[config.launchpad.errors_address],
            subject="POFile statistics errors",
            body=MailWrapper().format(message))

    self.transaction.commit()
    self.logger.info("Done.")
def run(self):
    loop = CreditsFixer(self.transaction, self.logger)
    DBLoopTuner(loop, 5).run()
    self.logger.info("Done.")
def main(self):
    series = self._getTargetSeries()
    for statement in statements:
        delete = ExecuteLoop(statement, series, self.logger)
        tuner = DBLoopTuner(delete, 2.0, maximum_chunk_size=5000)
        tuner.run()
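Neither the contents of `statements` nor the real ExecuteLoop appear above. Purely as a hypothetical illustration, a wrapper of that shape could look like the sketch below, assuming each statement is a DELETE parameterized by the series id that limits itself to %(chunk)s rows (for example via an `id IN (SELECT ... LIMIT %(chunk)s)` subquery); the names and placeholders are assumptions.

class ExecuteLoopSketch:
    """Hypothetical chunked-delete loop; not the real ExecuteLoop."""

    def __init__(self, statement, series, logger):
        self.statement = statement
        self.series = series
        self.logger = logger
        self.done = False

    def isDone(self):
        return self.done

    def __call__(self, chunk_size):
        # Delete at most chunk_size matching rows; we are done when a
        # pass finds nothing left to delete.  (A real loop would also
        # commit between chunks.)
        cur = cursor()
        cur.execute(self.statement, {
            'series': self.series.id, 'chunk': int(chunk_size)})
        self.logger.debug("Deleted %d rows.", cur.rowcount)
        if cur.rowcount == 0:
            self.done = True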
def delete_unreferenced_content(con):
    """Invoke UnreferencedContentPruner."""
    loop_tuner = DBLoopTuner(UnreferencedContentPruner(con), 5, log=log)
    loop_tuner.run()
def delete_unreferenced_aliases(con):
    """Run the UnreferencedLibraryFileAliasPruner."""
    loop_tuner = DBLoopTuner(
        UnreferencedLibraryFileAliasPruner(con), 5, log=log)
    loop_tuner.run()
def expire_aliases(con):
    """Invoke ExpireAliases."""
    loop_tuner = DBLoopTuner(ExpireAliases(con), 5, log=log)
    loop_tuner.run()