def handle(self, *args, **options):
    """Import one or more mbox archive files into the database.

    Positional ``args`` are mbox file paths; ``options`` carry the
    target list address, verbosity, an optional ``since`` cutoff date,
    and flags controlling mtime checks and Mailman synchronization.
    """
    self._check_options(args, options)
    setup_logging(self, options["verbosity"])
    # main
    list_address = options["list_address"].lower()
    ## Keep autocommit on SQLite:
    ## https://docs.djangoproject.com/en/1.6/topics/db/transactions/#savepoints-in-sqlite
    #if settings.DATABASES["default"]["ENGINE"] != "django.db.backends.sqlite3":
    #    transaction.set_autocommit(False)
    settings.HYPERKITTY_BATCH_MODE = True
    # Only import emails newer than the latest email already in the DB:
    # when no explicit --since was given, default the cutoff to the date
    # of the most recent stored email for this list.
    latest_email_date = Email.objects.filter(
        mailinglist__name=list_address
    ).values("date").order_by("-date").first()
    if latest_email_date and not options["since"]:
        options["since"] = latest_email_date["date"]
    if options["since"] and options["verbosity"] >= 2:
        self.stdout.write("Only emails after %s will be imported"
                          % options["since"])
    importer = DbImporter(list_address, options, self.stdout, self.stderr)
    # disable mailman client for now
    for mbfile in args:
        if options["verbosity"] >= 1:
            self.stdout.write("Importing from mbox file %s to %s"
                              % (mbfile, list_address))
        # Skip whole files whose modification time predates the cutoff,
        # unless the user asked to ignore mtimes.
        if not options["ignore_mtime"] and options["since"] is not None:
            mtime = datetime.fromtimestamp(
                os.path.getmtime(mbfile), tz.tzlocal())
            if mtime <= options["since"]:
                if options["verbosity"] >= 2:
                    self.stdout.write('Mailbox file for %s is too old'
                                      % list_address)
                continue
        importer.from_mbox(mbfile)
        if options["verbosity"] >= 2:
            # Cumulative count for the list after each imported file.
            total_in_list = Email.objects.filter(
                mailinglist__name=list_address).count()
            self.stdout.write(' %s emails are stored into the database'
                              % total_in_list)
    #timeit("start")
    if options["verbosity"] >= 1:
        self.stdout.write("Computing thread structure")
    # Recompute ordering/depth only for threads touched by this import.
    for thread in Thread.objects.filter(
            id__in=importer.impacted_thread_ids):
        #timeit("before")
        compute_thread_order_and_depth(thread)
        #timeit("after")
    #showtimes()
    if not options["no_sync_mailman"]:
        if options["verbosity"] >= 1:
            self.stdout.write("Synchronizing properties with Mailman")
        sync_with_mailman()
    #if not transaction.get_autocommit():
    #    transaction.commit()
    if options["verbosity"] >= 1:
        self.stdout.write(
            "The full-text search index will be updated every minute. Run "
            "the 'manage.py runjob update_index' command to update it now."
        )
def handle(self, *args, **options):
    """Warm up the cache for the selected mailing lists.

    When list names are given via ``--mlists``, only those lists are
    processed (a missing name raises ``MailingList.DoesNotExist``);
    otherwise every known mailing list is processed in name order.
    """
    setup_logging(self, options["verbosity"])
    requested_names = options["mlists"]
    if requested_names:
        targets = [MailingList.objects.get(name=name)
                   for name in requested_names]
    else:
        targets = MailingList.objects.order_by("name").all()
    for mlist in targets:
        self.warm_up_mlist(mlist, options)
def handle(self, *args, **options):
    """Synchronize HyperKitty's stored properties with Mailman.

    Accepts no positional arguments; ``--overwrite`` forces existing
    values to be replaced.
    """
    verbosity = int(options.get("verbosity", "1"))
    options["verbosity"] = verbosity
    setup_logging(self, verbosity)
    if args:
        raise CommandError("no arguments allowed")
    overwrite = options.get("overwrite", False)
    sync_with_mailman(overwrite=overwrite)
def handle(self, *args, **options):
    """Update the full-text search index for a single mailing list.

    Reads the list name from the ``listname`` option (a one-element
    sequence) and delegates to ``update_index``.
    """
    options["verbosity"] = int(options.get("verbosity", "1"))
    setup_logging(self, options["verbosity"])
    # Guard against a missing/empty "listname" option: the original
    # code did options.get("listname")[0], which raises an opaque
    # TypeError ("'NoneType' object is not subscriptable") when the
    # option is absent. Fail with a clear command error instead.
    listnames = options.get("listname")
    if not listnames:
        raise CommandError("the name of a mailing-list is required")
    update_index(listname=listnames[0],
                 verbosity=options["verbosity"])
def handle(self, *args, **options):
    """Download monthly archive files in parallel.

    Fans out one ``_archive_downloader`` job per (options, year, month)
    combination over a small worker pool.
    """
    self._check_options(args, options)
    setup_logging(self, options["verbosity"])
    # Use the pool as a context manager so the worker processes are
    # always reaped; the original created Pool(5) and never closed or
    # joined it, leaking the workers on error or early exit.
    with Pool(5) as pool:
        pool.map(_archive_downloader,
                 itertools.product([options], options["start"], MONTHS))
def handle(self, *args, **options):
    """Import mbox archive files into the database (newer variant).

    Mbox paths come from the ``mbox`` option; after importing, thread
    structure is recomputed in batches, properties are synchronized
    with Mailman, and the list's cache is warmed up.
    """
    self._check_options(options)
    setup_logging(self, options["verbosity"])
    # main
    list_address = options["list_address"].lower()
    # Keep autocommit on SQLite:
    # https://docs.djangoproject.com/en/1.8/topics/db/transactions/#savepoints-in-sqlite
    # if (settings.DATABASES["default"]["ENGINE"]
    #         != "django.db.backends.sqlite3":
    #     transaction.set_autocommit(False)
    settings.HYPERKITTY_BATCH_MODE = True
    # Only import emails newer than the latest email in the DB:
    # default the "since" cutoff to the newest stored email date.
    latest_email_date = Email.objects.filter(
        mailinglist__name=list_address).values("date").order_by(
        "-date").first()
    if latest_email_date and not options["since"]:
        options["since"] = latest_email_date["date"]
    if options["since"] and options["verbosity"] >= 2:
        self.stdout.write("Only emails after %s will be imported"
                          % options["since"])
    importer = DbImporter(list_address, options, self.stdout, self.stderr)
    # disable mailman client for now
    for mbfile in options["mbox"]:
        if options["verbosity"] >= 1:
            self.stdout.write("Importing from mbox file %s to %s"
                              % (mbfile, list_address))
        # Skip files whose mtime predates the cutoff, unless the user
        # asked to ignore mtimes.
        if not options["ignore_mtime"] and options["since"] is not None:
            mtime = datetime.fromtimestamp(os.path.getmtime(mbfile),
                                           tz.tzlocal())
            if mtime <= options["since"]:
                if options["verbosity"] >= 2:
                    self.stdout.write('Mailbox file for %s is too old'
                                      % list_address)
                continue
        importer.from_mbox(mbfile)
        if options["verbosity"] >= 2:
            # Cumulative count for the list after each imported file.
            total_in_list = Email.objects.filter(
                mailinglist__name=list_address).count()
            self.stdout.write(' %s emails are stored into the database'
                              % total_in_list)
    if options["verbosity"] >= 1:
        self.stdout.write("Computing thread structure")
    # Work on batches of thread ids to avoid creating a huge SQL request
    # (it's an IN statement)
    thread_ids = list(importer.impacted_thread_ids)
    while thread_ids:
        thread_ids_batch = thread_ids[:100]
        thread_ids = thread_ids[100:]
        for thread in Thread.objects.filter(id__in=thread_ids_batch):
            compute_thread_order_and_depth(thread)
    if not options["no_sync_mailman"]:
        if options["verbosity"] >= 1:
            self.stdout.write("Synchronizing properties with Mailman")
        sync_with_mailman()
    # if not transaction.get_autocommit():
    #     transaction.commit()
    if options["verbosity"] >= 1:
        self.stdout.write("Warming up cache")
    call_command("hyperkitty_warm_up_cache", list_address)
    if options["verbosity"] >= 1:
        self.stdout.write(
            "The full-text search index is not updated for this list. "
            "It will not be updated by the 'minutely' incremental "
            "update job. To update the index for this list, run the "
            "'manage.py update_index_one_list {}' command.".format(
                list_address))