def test_call_update_from_mailman(self):
    """Each known mailing-list triggers one update during a full sync."""
    num_lists = 10
    for idx in range(num_lists):
        MailingList.objects.create(name="*****@*****.**" % idx)
    mailman.sync_with_mailman()
    # There is no direct hook on MailingList.update_from_mailman(); the
    # mocked client's get_list() call count is the closest observable proxy.
    self.assertEqual(self.mailman_client.get_list.call_count, num_lists)
def test_call_update_from_mailman(self):
    """Syncing must call update_from_mailman() once per mailing-list."""
    # Create ten lists so the sync loop has something to walk over.
    for counter in range(10):
        MailingList.objects.create(name="*****@*****.**" % counter)
    mailman.sync_with_mailman()
    # update_from_mailman() reaches Mailman through the client's
    # get_list() method, so its call count stands in for the real thing.
    self.assertEqual(self.mailman_client.get_list.call_count, 10)
def handle(self, *args, **options):
    """Import mbox archive files into the database of a mailing-list.

    Positional ``args`` are paths to mbox files.  ``options`` must provide
    at least ``list_address``, ``verbosity``, ``since``, ``ignore_mtime``
    and ``no_sync_mailman`` (validated by ``_check_options()``).  After the
    import, thread ordering/depth is recomputed for every impacted thread
    and, unless disabled, list/user properties are re-synchronized with
    Mailman.
    """
    self._check_options(args, options)
    setup_logging(self, options["verbosity"])
    # main
    list_address = options["list_address"].lower()
    ## Keep autocommit on SQLite:
    ## https://docs.djangoproject.com/en/1.6/topics/db/transactions/#savepoints-in-sqlite
    #if settings.DATABASES["default"]["ENGINE"] != "django.db.backends.sqlite3":
    #    transaction.set_autocommit(False)
    # Batch mode presumably suppresses per-message signal work during the
    # import — confirm against the rest of the project.
    settings.HYPERKITTY_BATCH_MODE = True
    # Only import emails newer than the latest email already in the DB:
    # default "since" to the date of the most recent stored email.
    latest_email_date = Email.objects.filter(
        mailinglist__name=list_address
        ).values("date").order_by("-date").first()
    if latest_email_date and not options["since"]:
        options["since"] = latest_email_date["date"]
    if options["since"] and options["verbosity"] >= 2:
        self.stdout.write("Only emails after %s will be imported"
                          % options["since"])
    importer = DbImporter(list_address, options, self.stdout, self.stderr)
    # disable mailman client for now
    for mbfile in args:
        if options["verbosity"] >= 1:
            self.stdout.write("Importing from mbox file %s to %s"
                              % (mbfile, list_address))
        # Skip files whose mtime shows they predate the cutoff date,
        # unless the user explicitly asked to ignore mtimes.
        if not options["ignore_mtime"] and options["since"] is not None:
            mtime = datetime.fromtimestamp(
                os.path.getmtime(mbfile), tz.tzlocal())
            if mtime <= options["since"]:
                if options["verbosity"] >= 2:
                    self.stdout.write('Mailbox file for %s is too old'
                                      % list_address)
                continue
        importer.from_mbox(mbfile)
        if options["verbosity"] >= 2:
            total_in_list = Email.objects.filter(
                mailinglist__name=list_address).count()
            self.stdout.write(' %s emails are stored into the database'
                              % total_in_list)
    #timeit("start")
    if options["verbosity"] >= 1:
        self.stdout.write("Computing thread structure")
    # Recompute ordering/depth only for threads touched by this import.
    for thread in Thread.objects.filter(
            id__in=importer.impacted_thread_ids):
        #timeit("before")
        compute_thread_order_and_depth(thread)
        #timeit("after")
    #showtimes()
    if not options["no_sync_mailman"]:
        if options["verbosity"] >= 1:
            self.stdout.write("Synchronizing properties with Mailman")
        sync_with_mailman()
    #if not transaction.get_autocommit():
    #    transaction.commit()
    if options["verbosity"] >= 1:
        self.stdout.write(
            "The full-text search index will be updated every minute. Run "
            "the 'manage.py runjob update_index' command to update it now."
            )
def handle(self, *args, **options):
    """Configure logging, refuse positional arguments, then run the sync."""
    options["verbosity"] = int(options.get("verbosity", "1"))
    # logging: verbosity 3 and above turns on debug output
    level = logging.DEBUG if options["verbosity"] >= 3 else logging.INFO
    logging.basicConfig(format='%(message)s', level=level)
    if args:
        raise CommandError("no arguments allowed")
    sync_with_mailman()
def test_call_set_mailman_id(self):
    """Only senders without a mailman_id are looked up during a sync."""
    fake_user = Mock()
    fake_user.user_id = "from-mailman"
    self.mailman_client.get_user.side_effect = lambda _addr: fake_user
    # Ten senders missing their mailman_id, ten that already have one.
    for num in range(10):
        Sender.objects.create(address="*****@*****.**" % num)
    for num in range(10, 20):
        Sender.objects.create(
            address="*****@*****.**" % num, mailman_id="already-set")
    mailman.sync_with_mailman()
    # Sender.set_mailman_id() goes through the client's get_user(), so its
    # call count tells us how many senders were actually processed.
    self.assertEqual(self.mailman_client.get_user.call_count, 10)
    # Every sender now has an id, and pre-set ids were left untouched.
    self.assertEqual(
        Sender.objects.filter(mailman_id__isnull=True).count(), 0)
    self.assertEqual(
        Sender.objects.filter(mailman_id="from-mailman").count(), 10)
    self.assertEqual(
        Sender.objects.filter(mailman_id="already-set").count(), 10)
def test_call_set_mailman_id(self):
    """Syncing fills in missing mailman_ids without touching existing ones."""
    mailman_user = Mock()
    mailman_user.user_id = "from-mailman"
    self.mailman_client.get_user.side_effect = lambda address: mailman_user
    # First half of the senders lack an id; second half already has one.
    for num in range(20):
        extra = {} if num < 10 else {"mailman_id": "already-set"}
        Sender.objects.create(address="*****@*****.**" % num, **extra)
    mailman.sync_with_mailman()
    # set_mailman_id() is observable only through the mocked client's
    # get_user(); ten calls means only the id-less senders were considered.
    self.assertEqual(self.mailman_client.get_user.call_count, 10)
    self.assertEqual(
        Sender.objects.filter(mailman_id__isnull=True).count(), 0)
    self.assertEqual(
        Sender.objects.filter(mailman_id="from-mailman").count(), 10)
    self.assertEqual(
        Sender.objects.filter(mailman_id="already-set").count(), 10)
def handle(self, *args, **options):
    """Set up logging, reject positional args, and run the Mailman sync."""
    verbosity = int(options.get("verbosity", "1"))
    options["verbosity"] = verbosity
    setup_logging(self, verbosity)
    if args:
        raise CommandError("no arguments allowed")
    # --overwrite forces a refresh of properties already stored locally.
    overwrite = options.get("overwrite", False)
    sync_with_mailman(overwrite=overwrite)
def execute(self):
    """Job entry point: synchronize local data with the Mailman server."""
    sync_with_mailman()
def handle(self, *args, **options):
    """Import mbox archive files into the database of a mailing-list.

    ``options`` must provide at least ``list_address``, ``mbox`` (the list
    of mbox file paths), ``verbosity``, ``since``, ``ignore_mtime`` and
    ``no_sync_mailman`` (validated by ``_check_options()``).  After the
    import, thread ordering/depth is recomputed in batches, properties are
    optionally re-synchronized with Mailman, and the per-list cache is
    warmed up.
    """
    self._check_options(options)
    setup_logging(self, options["verbosity"])
    # main
    list_address = options["list_address"].lower()
    # Keep autocommit on SQLite:
    # https://docs.djangoproject.com/en/1.8/topics/db/transactions/#savepoints-in-sqlite
    # if (settings.DATABASES["default"]["ENGINE"]
    #         != "django.db.backends.sqlite3":
    #     transaction.set_autocommit(False)
    # Batch mode presumably suppresses per-message signal work during the
    # import — confirm against the rest of the project.
    settings.HYPERKITTY_BATCH_MODE = True
    # Only import emails newer than the latest email in the DB:
    # default "since" to the date of the most recent stored email.
    latest_email_date = Email.objects.filter(
        mailinglist__name=list_address).values("date").order_by(
        "-date").first()
    if latest_email_date and not options["since"]:
        options["since"] = latest_email_date["date"]
    if options["since"] and options["verbosity"] >= 2:
        self.stdout.write("Only emails after %s will be imported"
                          % options["since"])
    importer = DbImporter(list_address, options, self.stdout, self.stderr)
    # disable mailman client for now
    for mbfile in options["mbox"]:
        if options["verbosity"] >= 1:
            self.stdout.write("Importing from mbox file %s to %s"
                              % (mbfile, list_address))
        # Skip files whose mtime shows they predate the cutoff date,
        # unless the user explicitly asked to ignore mtimes.
        if not options["ignore_mtime"] and options["since"] is not None:
            mtime = datetime.fromtimestamp(os.path.getmtime(mbfile),
                                           tz.tzlocal())
            if mtime <= options["since"]:
                if options["verbosity"] >= 2:
                    self.stdout.write('Mailbox file for %s is too old'
                                      % list_address)
                continue
        importer.from_mbox(mbfile)
        if options["verbosity"] >= 2:
            total_in_list = Email.objects.filter(
                mailinglist__name=list_address).count()
            self.stdout.write(' %s emails are stored into the database'
                              % total_in_list)
    if options["verbosity"] >= 1:
        self.stdout.write("Computing thread structure")
    # Work on batches of thread ids to avoid creating a huge SQL request
    # (it's an IN statement)
    thread_ids = list(importer.impacted_thread_ids)
    while thread_ids:
        thread_ids_batch = thread_ids[:100]
        thread_ids = thread_ids[100:]
        for thread in Thread.objects.filter(id__in=thread_ids_batch):
            compute_thread_order_and_depth(thread)
    if not options["no_sync_mailman"]:
        if options["verbosity"] >= 1:
            self.stdout.write("Synchronizing properties with Mailman")
        sync_with_mailman()
    # if not transaction.get_autocommit():
    #     transaction.commit()
    if options["verbosity"] >= 1:
        self.stdout.write("Warming up cache")
        call_command("hyperkitty_warm_up_cache", list_address)
    if options["verbosity"] >= 1:
        self.stdout.write(
            "The full-text search index is not updated for this list. "
            "It will not be updated by the 'minutely' incremental "
            "update job. To update the index for this list, run the "
            "'manage.py update_index_one_list {}' command.".format(
                list_address))