Example #1
    def test_invites_worker(self) -> None:
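        """The invites worker should send emails for current-format events and
        legacy (pre-1.8) `email`-format events, while skipping events whose
        PreregistrationUser row no longer exists."""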
        fake_client = self.FakeClient()
        invitor = self.example_user('iago')
        prereg_alice = PreregistrationUser.objects.create(
            email=self.nonreg_email('alice'), referred_by=invitor, realm=invitor.realm)
        PreregistrationUser.objects.create(
            email=self.nonreg_email('bob'), referred_by=invitor, realm=invitor.realm)
        data = [
            dict(prereg_id=prereg_alice.id, referrer_id=invitor.id, email_body=None),
            # Nonexistent prereg_id, as if the invitation was deleted
            dict(prereg_id=-1, referrer_id=invitor.id, email_body=None),
            # Form with `email` is from versions up to Zulip 1.7.1
            dict(email=self.nonreg_email('bob'), referrer_id=invitor.id, email_body=None),
        ]
        for element in data:
            fake_client.queue.append(('invites', element))

        with simulated_queue_client(lambda: fake_client):
            worker = queue_processors.ConfirmationEmailWorker()
            worker.setup()
            with patch('zerver.worker.queue_processors.do_send_confirmation_email'), \
                    patch('zerver.worker.queue_processors.create_confirmation_link'), \
                    patch('zerver.worker.queue_processors.send_future_email') \
                    as send_mock, \
                    patch('logging.info'):
                worker.start()
                self.assertEqual(send_mock.call_count, 2)
Example #2
    def test_UserActivityWorker(self) -> None:
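        """A queued user_activity event should result in a UserActivity row for
        the given user and client."""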
        fake_client = self.FakeClient()

        user = self.example_user('hamlet')
        UserActivity.objects.filter(
            user_profile = user.id,
            client = get_client('ios')
        ).delete()

        data = dict(
            user_profile_id = user.id,
            client = 'ios',
            time = time.time(),
            query = 'send_message'
        )
        fake_client.queue.append(('user_activity', data))

        with simulated_queue_client(lambda: fake_client):
            worker = queue_processors.UserActivityWorker()
            worker.setup()
            worker.start()
            activity_records = UserActivity.objects.filter(
                user_profile = user.id,
                client = get_client('ios')
            )
            self.assertEqual(len(activity_records), 1)
            self.assertEqual(activity_records[0].count, 1)
Example #3
    def test_error_handling(self) -> None:
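        """An event that makes consume() raise should be dumped to the queue's
        .errors file, while the remaining events are still processed."""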
        processed = []

        @queue_processors.assign_queue('unreliable_worker')
        class UnreliableWorker(queue_processors.QueueProcessingWorker):
            def consume(self, data: Mapping[str, Any]) -> None:
                if data["type"] == 'unexpected behaviour':
                    raise Exception('Worker task not performing as expected!')
                processed.append(data["type"])

            def _log_problem(self) -> None:
                # keep the tests quiet
                pass

        fake_client = self.FakeClient()
        for msg in ['good', 'fine', 'unexpected behaviour', 'back to normal']:
            fake_client.queue.append(('unreliable_worker', {'type': msg}))

        fn = os.path.join(settings.QUEUE_ERROR_DIR, 'unreliable_worker.errors')
        try:
            os.remove(fn)
        except OSError:  # nocoverage # error handling for the directory not existing
            pass

        with simulated_queue_client(lambda: fake_client):
            worker = UnreliableWorker()
            worker.setup()
            worker.start()

        self.assertEqual(processed, ['good', 'fine', 'back to normal'])
        with open(fn) as f:
            line = f.readline().strip()
        event = ujson.loads(line.split('\t')[1])
        self.assertEqual(event["type"], 'unexpected behaviour')
Example #4
    def test_email_sending_worker_retries(self) -> None:
        """Tests the retry_send_email_failures decorator to make sure it
        retries sending the email 3 times and then gives up."""
        fake_client = self.FakeClient()

        data = {
            'template_prefix': 'zerver/emails/confirm_new_email',
            'to_email': self.example_email("hamlet"),
            'from_name': 'Zulip Account Security',
            'from_address': FromAddress.NOREPLY,
            'context': {}
        }
        fake_client.queue.append(('email_senders', data))

        def fake_publish(queue_name: str,
                         event: Dict[str, Any],
                         processor: Callable[[Any], None]) -> None:
            fake_client.queue.append((queue_name, event))

        with simulated_queue_client(lambda: fake_client):
            worker = queue_processors.EmailSendingWorker()
            worker.setup()
            with patch('zerver.lib.send_email.build_email',
                       side_effect=smtplib.SMTPServerDisconnected), \
                    patch('zerver.lib.queue.queue_json_publish',
                          side_effect=fake_publish), \
                    patch('logging.exception'):
                worker.start()

        self.assertEqual(data['failed_tries'], 4)
Example #5
    def test_signups_worker_retries(self) -> None:
        """Tests the retry logic of signups queue."""
        fake_client = self.FakeClient()

        user_id = self.example_user('hamlet').id
        data = {'user_id': user_id, 'id': 'test_missed'}
        fake_client.queue.append(('signups', data))

        def fake_publish(queue_name: str, event: Dict[str, Any], processor: Callable[[Any], None]) -> None:
            fake_client.queue.append((queue_name, event))

        fake_response = MagicMock()
        fake_response.status_code = 400
        fake_response.text = ujson.dumps({'title': ''})
        with simulated_queue_client(lambda: fake_client):
            worker = queue_processors.SignupWorker()
            worker.setup()
            with patch('zerver.worker.queue_processors.requests.post',
                       return_value=fake_response), \
                    patch('zerver.lib.queue.queue_json_publish',
                          side_effect=fake_publish), \
                    patch('logging.info'), \
                    self.settings(MAILCHIMP_API_KEY='one-two',
                                  PRODUCTION=True,
                                  ZULIP_FRIENDS_LIST_ID='id'):
                worker.start()

        self.assertEqual(data['failed_tries'], 4)
Example #6
    def test_mirror_worker(self) -> None:
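        """Smoke test: the mirror worker should consume the queued email_mirror
        events without error; mirror_email itself is mocked out."""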
        fake_client = self.FakeClient()
        data = [
            dict(
                message=u'\xf3test',
                time=time.time(),
                rcpt_to=self.example_email('hamlet'),
            ),
            dict(
                message='\xf3test',
                time=time.time(),
                rcpt_to=self.example_email('hamlet'),
            ),
            dict(
                message='test',
                time=time.time(),
                rcpt_to=self.example_email('hamlet'),
            ),
        ]
        for element in data:
            fake_client.queue.append(('email_mirror', element))

        with patch('zerver.worker.queue_processors.mirror_email'):
            with simulated_queue_client(lambda: fake_client):
                worker = queue_processors.MirrorWorker()
                worker.setup()
                worker.start()
Example #7
    def test_signups_bad_request(self) -> None:
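        """An error response from MailChimp other than the 400 'Member Exists'
        case should be escalated via raise_for_status()."""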
        fake_client = self.FakeClient()

        user_id = self.example_user('hamlet').id
        data = {'user_id': user_id, 'id': 'test_missed'}
        fake_client.queue.append(('signups', data))

        fake_response = MagicMock()
        fake_response.status_code = 444  # Any non-400 bad request code.
        fake_response.text = ujson.dumps({'title': 'Member Exists'})
        with simulated_queue_client(lambda: fake_client):
            worker = queue_processors.SignupWorker()
            worker.setup()
            with patch('zerver.worker.queue_processors.requests.post',
                       return_value=fake_response), \
                    self.settings(MAILCHIMP_API_KEY='one-two',
                                  PRODUCTION=True,
                                  ZULIP_FRIENDS_LIST_ID='id'):
                worker.start()
                fake_response.raise_for_status.assert_called_once()
Example #8
    def test_slow_queries_worker(self) -> None:
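        """The slow_queries loop worker should wake up once per minute and post
        the batched queries as a single message from the error bot to the
        "errors" stream."""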
        error_bot = get_system_bot(settings.ERROR_BOT)
        fake_client = self.FakeClient()
        events = [
            'test query (data)',
            'second test query (data)',
        ]
        for event in events:
            fake_client.queue.append(('slow_queries', event))

        worker = SlowQueryWorker()

        time_mock = patch(
            'zerver.worker.queue_processors.time.sleep',
            side_effect=AbortLoop,
        )

        send_mock = patch(
            'zerver.worker.queue_processors.internal_send_message'
        )

        with send_mock as sm, time_mock as tm:
            with simulated_queue_client(lambda: fake_client):
                try:
                    worker.setup()
                    worker.start()
                except AbortLoop:
                    pass

        self.assertEqual(tm.call_args[0][0], 60)  # should sleep 60 seconds

        sm.assert_called_once()
        args = [c[0] for c in sm.call_args_list][0]
        self.assertEqual(args[0], error_bot.realm)
        self.assertEqual(args[1], error_bot.email)
        self.assertEqual(args[2], "stream")
        self.assertEqual(args[3], "errors")
        self.assertEqual(args[4], "testserver: slow queries")
        self.assertEqual(args[5], "    test query (data)\n    second test query (data)\n")
Example #9
    def test_email_sending_worker_retries(self) -> None:
        """Tests the retry_send_email_failures decorator to make sure it
        retries sending the email 3 times and then gives up."""
        fake_client = self.FakeClient()

        data = {'test': 'test', 'id': 'test_missed'}
        fake_client.queue.append(('email_senders', data))

        def fake_publish(queue_name: str,
                         event: Dict[str, Any],
                         processor: Callable[[Any], None]) -> None:
            fake_client.queue.append((queue_name, event))

        with simulated_queue_client(lambda: fake_client):
            worker = queue_processors.EmailSendingWorker()
            worker.setup()
            with patch('zerver.worker.queue_processors.send_email_from_dict',
                       side_effect=smtplib.SMTPServerDisconnected), \
                    patch('zerver.lib.queue.queue_json_publish',
                          side_effect=fake_publish), \
                    patch('logging.exception'):
                worker.start()

        self.assertEqual(data['failed_tries'], 4)
Example #10
    def test_signups_worker_existing_member(self) -> None:
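        """A 400 'Member Exists' response from MailChimp should just log a
        warning rather than failing the event."""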
        fake_client = self.FakeClient()

        user_id = self.example_user('hamlet').id
        data = {'user_id': user_id,
                'id': 'test_missed',
                'email_address': '*****@*****.**'}
        fake_client.queue.append(('signups', data))

        fake_response = MagicMock()
        fake_response.status_code = 400
        fake_response.text = ujson.dumps({'title': 'Member Exists'})
        with simulated_queue_client(lambda: fake_client):
            worker = queue_processors.SignupWorker()
            worker.setup()
            with patch('zerver.worker.queue_processors.requests.post',
                       return_value=fake_response), \
                    self.settings(MAILCHIMP_API_KEY='one-two',
                                  PRODUCTION=True,
                                  ZULIP_FRIENDS_LIST_ID='id'):
                with patch('logging.warning') as logging_warning_mock:
                    worker.start()
                    logging_warning_mock.assert_called_once_with(
                        "Attempted to sign up already existing email to list: [email protected]")
Example #11
    def test_missed_message_worker(self) -> None:
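        """missedmessage_emails events should be batched per user and passed to
        do_send_missedmessage_events_reply_in_zulip once
        maybe_send_batched_emails runs."""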
        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')
        othello = self.example_user('othello')

        hamlet1_msg_id = self.send_personal_message(
            from_email=cordelia.email,
            to_email=hamlet.email,
            content='hi hamlet',
        )

        hamlet2_msg_id = self.send_personal_message(
            from_email=cordelia.email,
            to_email=hamlet.email,
            content='goodbye hamlet',
        )

        hamlet3_msg_id = self.send_personal_message(
            from_email=cordelia.email,
            to_email=hamlet.email,
            content='hello again hamlet',
        )

        othello_msg_id = self.send_personal_message(
            from_email=cordelia.email,
            to_email=othello.email,
            content='where art thou, othello?',
        )

        events = [
            dict(user_profile_id=hamlet.id, message_id=hamlet1_msg_id),
            dict(user_profile_id=hamlet.id, message_id=hamlet2_msg_id),
            dict(user_profile_id=othello.id, message_id=othello_msg_id),
        ]

        fake_client = self.FakeClient()
        for event in events:
            fake_client.queue.append(('missedmessage_emails', event))

        mmw = MissedMessageWorker()

        class MockTimer:
            is_running = False

            def is_alive(self) -> bool:
                return self.is_running

            def start(self) -> None:
                self.is_running = True

            def cancel(self) -> None:
                self.is_running = False

        timer = MockTimer()
        time_mock = patch(
            'zerver.worker.queue_processors.Timer',
            return_value=timer,
        )

        send_mock = patch(
            'zerver.lib.email_notifications.do_send_missedmessage_events_reply_in_zulip'
        )
        mmw.BATCH_DURATION = 0

        bonus_event = dict(user_profile_id=hamlet.id, message_id=hamlet3_msg_id)

        with send_mock as sm, time_mock as tm:
            with simulated_queue_client(lambda: fake_client):
                self.assertFalse(timer.is_alive())
                mmw.setup()
                mmw.start()
                self.assertTrue(timer.is_alive())
                fake_client.queue.append(('missedmessage_emails', bonus_event))

                # Double-calling start is our way to get it to run again
                self.assertTrue(timer.is_alive())
                mmw.start()

                # Now, we actually send the emails.
                mmw.maybe_send_batched_emails()
                self.assertFalse(timer.is_alive())

        self.assertEqual(tm.call_args[0][0], 5)  # Timer should be scheduled with a 5 second interval

        args = [c[0] for c in sm.call_args_list]
        arg_dict = {
            arg[0].id: dict(
                missed_messages=arg[1],
                count=arg[2],
            )
            for arg in args
        }

        hamlet_info = arg_dict[hamlet.id]
        self.assertEqual(hamlet_info['count'], 3)
        self.assertEqual(
            {m['message'].content for m in hamlet_info['missed_messages']},
            {'hi hamlet', 'goodbye hamlet', 'hello again hamlet'},
        )

        othello_info = arg_dict[othello.id]
        self.assertEqual(othello_info['count'], 1)
        self.assertEqual(
            {m['message'].content for m in othello_info['missed_messages']},
            {'where art thou, othello?'}
        )
Example #12
    def test_push_notifications_worker(self) -> None:
        """
        The push notifications system has its own comprehensive test suite,
        so we can limit ourselves to simple unit testing the queue processor,
        without going deeper into the system - by mocking the handle_push_notification
        functions to immediately produce the effect we want, to test its handling by the queue
        processor.
        """
        fake_client = self.FakeClient()

        def fake_publish(queue_name: str, event: Dict[str, Any],
                         processor: Callable[[Any], None]) -> None:
            fake_client.queue.append((queue_name, event))

        def generate_new_message_notification() -> Dict[str, Any]:
            return build_offline_notification(1, 1)

        def generate_remove_notification() -> Dict[str, Any]:
            return {
                "type": "remove",
                "user_profile_id": 1,
                "message_ids": [1],
            }

        with simulated_queue_client(lambda: fake_client):
            worker = queue_processors.PushNotificationsWorker()
            worker.setup()
            with patch('zerver.worker.queue_processors.handle_push_notification') as mock_handle_new, \
                    patch('zerver.worker.queue_processors.handle_remove_push_notification') as mock_handle_remove, \
                    patch('zerver.worker.queue_processors.initialize_push_notifications'):
                event_new = generate_new_message_notification()
                event_remove = generate_remove_notification()
                fake_client.queue.append(
                    ('missedmessage_mobile_notifications', event_new))
                fake_client.queue.append(
                    ('missedmessage_mobile_notifications', event_remove))

                worker.start()
                mock_handle_new.assert_called_once_with(
                    event_new['user_profile_id'], event_new)
                mock_handle_remove.assert_called_once_with(
                    event_remove['user_profile_id'],
                    event_remove['message_ids'])

            with patch('zerver.worker.queue_processors.handle_push_notification',
                       side_effect=PushNotificationBouncerRetryLaterError("test")) as mock_handle_new, \
                    patch('zerver.worker.queue_processors.handle_remove_push_notification',
                          side_effect=PushNotificationBouncerRetryLaterError("test")) as mock_handle_remove, \
                    patch('zerver.worker.queue_processors.initialize_push_notifications'):
                event_new = generate_new_message_notification()
                event_remove = generate_remove_notification()
                fake_client.queue.append(
                    ('missedmessage_mobile_notifications', event_new))
                fake_client.queue.append(
                    ('missedmessage_mobile_notifications', event_remove))

                with patch('zerver.lib.queue.queue_json_publish',
                           side_effect=fake_publish):
                    worker.start()
                    self.assertEqual(mock_handle_new.call_count,
                                     1 + MAX_REQUEST_RETRIES)
                    self.assertEqual(mock_handle_remove.call_count,
                                     1 + MAX_REQUEST_RETRIES)
Example #13
    def test_UserActivityWorker(self) -> None:
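        """The worker should handle both the current (client_id) and legacy
        (client name) event formats, incrementing the count on a single
        UserActivity row."""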
        fake_client = self.FakeClient()

        user = self.example_user('hamlet')
        UserActivity.objects.filter(
            user_profile = user.id,
            client = get_client('ios'),
        ).delete()

        data = dict(
            user_profile_id = user.id,
            client_id = get_client('ios').id,
            time = time.time(),
            query = 'send_message',
        )
        fake_client.queue.append(('user_activity', data))

        # The block below adds an event using the old format,
        # having the client name instead of its id, to test that the
        # queue worker handles it correctly. That compatibility code can
        # be deleted in a later release, and this test should then be cleaned up.
        data_old_format = dict(
            user_profile_id = user.id,
            client = 'ios',
            time = time.time(),
            query = 'send_message',
        )
        fake_client.queue.append(('user_activity', data_old_format))

        with loopworker_sleep_mock:
            with simulated_queue_client(lambda: fake_client):
                worker = queue_processors.UserActivityWorker()
                worker.setup()
                try:
                    worker.start()
                except AbortLoop:
                    pass
                activity_records = UserActivity.objects.filter(
                    user_profile = user.id,
                    client = get_client('ios'),
                )
                self.assertEqual(len(activity_records), 1)
                self.assertEqual(activity_records[0].count, 2)

        # Now process the event a second time and confirm count goes
        # up. Ideally, we'd use an event with a slightly newer
        # time, but it's not really important.
        fake_client.queue.append(('user_activity', data))
        with loopworker_sleep_mock:
            with simulated_queue_client(lambda: fake_client):
                worker = queue_processors.UserActivityWorker()
                worker.setup()
                try:
                    worker.start()
                except AbortLoop:
                    pass
                activity_records = UserActivity.objects.filter(
                    user_profile = user.id,
                    client = get_client('ios'),
                )
                self.assertEqual(len(activity_records), 1)
                self.assertEqual(activity_records[0].count, 3)
Example #14
    def test_missed_message_worker(self) -> None:
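        """missedmessage_emails events should be aggregated per user before
        being passed to do_send_missedmessage_events_reply_in_zulip."""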
        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')
        othello = self.example_user('othello')

        hamlet1_msg_id = self.send_personal_message(
            from_email=cordelia.email,
            to_email=hamlet.email,
            content='hi hamlet',
        )

        hamlet2_msg_id = self.send_personal_message(
            from_email=cordelia.email,
            to_email=hamlet.email,
            content='goodbye hamlet',
        )

        othello_msg_id = self.send_personal_message(
            from_email=cordelia.email,
            to_email=othello.email,
            content='where art thou, othello?',
        )

        events = [
            dict(user_profile_id=hamlet.id, message_id=hamlet1_msg_id),
            dict(user_profile_id=hamlet.id, message_id=hamlet2_msg_id),
            dict(user_profile_id=othello.id, message_id=othello_msg_id),
        ]

        fake_client = self.FakeClient()
        for event in events:
            fake_client.queue.append(('missedmessage_emails', event))

        mmw = MissedMessageWorker()

        time_mock = patch(
            'zerver.worker.queue_processors.time.sleep',
            side_effect=AbortLoop,
        )

        send_mock = patch(
            'zerver.lib.notifications.do_send_missedmessage_events_reply_in_zulip'
        )

        with send_mock as sm, time_mock as tm:
            with simulated_queue_client(lambda: fake_client):
                try:
                    mmw.setup()
                    mmw.start()
                except AbortLoop:
                    pass

        self.assertEqual(tm.call_args[0][0], 120)  # should sleep two minutes

        args = [c[0] for c in sm.call_args_list]
        arg_dict = {
            arg[0].id: dict(
                missed_messages=arg[1],
                count=arg[2],
            )
            for arg in args
        }

        hamlet_info = arg_dict[hamlet.id]
        self.assertEqual(hamlet_info['count'], 2)
        self.assertEqual(
            {m.content for m in hamlet_info['missed_messages']},
            {'hi hamlet', 'goodbye hamlet'},
        )

        othello_info = arg_dict[othello.id]
        self.assertEqual(othello_info['count'], 1)
        self.assertEqual(
            {m.content for m in othello_info['missed_messages']},
            {'where art thou, othello?'}
        )
Example #15
    def test_missed_message_worker(self) -> None:
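        """Events should be persisted as ScheduledMessageNotificationEmail rows
        and only be sent once the user's batching period has elapsed."""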
        cordelia = self.example_user("cordelia")
        hamlet = self.example_user("hamlet")
        othello = self.example_user("othello")

        hamlet1_msg_id = self.send_personal_message(
            from_user=cordelia,
            to_user=hamlet,
            content="hi hamlet",
        )

        hamlet2_msg_id = self.send_personal_message(
            from_user=cordelia,
            to_user=hamlet,
            content="goodbye hamlet",
        )

        hamlet3_msg_id = self.send_personal_message(
            from_user=cordelia,
            to_user=hamlet,
            content="hello again hamlet",
        )

        othello_msg_id = self.send_personal_message(
            from_user=cordelia,
            to_user=othello,
            content="where art thou, othello?",
        )

        hamlet_event1 = dict(
            user_profile_id=hamlet.id,
            message_id=hamlet1_msg_id,
            trigger=NotificationTriggers.PRIVATE_MESSAGE,
        )
        hamlet_event2 = dict(
            user_profile_id=hamlet.id,
            message_id=hamlet2_msg_id,
            trigger=NotificationTriggers.PRIVATE_MESSAGE,
            mentioned_user_group_id=4,
        )
        othello_event = dict(
            user_profile_id=othello.id,
            message_id=othello_msg_id,
            trigger=NotificationTriggers.PRIVATE_MESSAGE,
        )

        events = [hamlet_event1, hamlet_event2, othello_event]

        fake_client = self.FakeClient()
        for event in events:
            fake_client.enqueue("missedmessage_emails", event)

        mmw = MissedMessageWorker()
        batch_duration = datetime.timedelta(
            seconds=hamlet.email_notifications_batching_period_seconds)
        assert (hamlet.email_notifications_batching_period_seconds ==
                othello.email_notifications_batching_period_seconds)

        class MockTimer:
            is_running = False

            def is_alive(self) -> bool:
                return self.is_running

            def start(self) -> None:
                self.is_running = True

        timer = MockTimer()
        timer_mock = patch(
            "zerver.worker.queue_processors.Timer",
            return_value=timer,
        )

        send_mock = patch(
            "zerver.lib.email_notifications.do_send_missedmessage_events_reply_in_zulip",
        )

        bonus_event_hamlet = dict(
            user_profile_id=hamlet.id,
            message_id=hamlet3_msg_id,
            trigger=NotificationTriggers.PRIVATE_MESSAGE,
        )

        def check_row(
            row: ScheduledMessageNotificationEmail,
            scheduled_timestamp: datetime.datetime,
            mentioned_user_group_id: Optional[int],
        ) -> None:
            self.assertEqual(row.trigger, NotificationTriggers.PRIVATE_MESSAGE)
            self.assertEqual(row.scheduled_timestamp, scheduled_timestamp)
            self.assertEqual(row.mentioned_user_group_id,
                             mentioned_user_group_id)

        with send_mock as sm, timer_mock as tm:
            with simulated_queue_client(lambda: fake_client):
                self.assertFalse(timer.is_alive())

                time_zero = datetime.datetime(2021,
                                              1,
                                              1,
                                              tzinfo=datetime.timezone.utc)
                expected_scheduled_timestamp = time_zero + batch_duration
                with patch("zerver.worker.queue_processors.timezone_now",
                           return_value=time_zero):
                    mmw.setup()
                    mmw.start()

                    # The events should be saved in the database
                    hamlet_row1 = ScheduledMessageNotificationEmail.objects.get(
                        user_profile_id=hamlet.id, message_id=hamlet1_msg_id)
                    check_row(hamlet_row1, expected_scheduled_timestamp, None)

                    hamlet_row2 = ScheduledMessageNotificationEmail.objects.get(
                        user_profile_id=hamlet.id, message_id=hamlet2_msg_id)
                    check_row(hamlet_row2, expected_scheduled_timestamp, 4)

                    othello_row1 = ScheduledMessageNotificationEmail.objects.get(
                        user_profile_id=othello.id, message_id=othello_msg_id)
                    check_row(othello_row1, expected_scheduled_timestamp, None)

                    # Additionally, the timer should have been started
                    self.assertTrue(timer.is_alive())

                # If another event is received, test that it gets saved with the same
                # `expected_scheduled_timestamp` as the earlier events.
                fake_client.enqueue("missedmessage_emails", bonus_event_hamlet)
                self.assertTrue(timer.is_alive())
                few_moments_later = time_zero + datetime.timedelta(seconds=3)
                with patch("zerver.worker.queue_processors.timezone_now",
                           return_value=few_moments_later):
                    # Double-calling start is our way to get it to run again
                    mmw.start()
                    hamlet_row3 = ScheduledMessageNotificationEmail.objects.get(
                        user_profile_id=hamlet.id, message_id=hamlet3_msg_id)
                    check_row(hamlet_row3, expected_scheduled_timestamp, None)

                # Now let us test `maybe_send_batched_emails`
                # If called too early, it shouldn't process the emails.
                one_minute_premature = expected_scheduled_timestamp - datetime.timedelta(
                    seconds=60)
                with patch("zerver.worker.queue_processors.timezone_now",
                           return_value=one_minute_premature):
                    mmw.maybe_send_batched_emails()
                    self.assertEqual(
                        ScheduledMessageNotificationEmail.objects.count(), 4)

                # If called after `expected_scheduled_timestamp`, it should process all emails.
                one_minute_overdue = expected_scheduled_timestamp + datetime.timedelta(
                    seconds=60)
                with self.assertLogs(level="INFO") as info_logs, patch(
                        "zerver.worker.queue_processors.timezone_now",
                        return_value=one_minute_overdue):
                    mmw.maybe_send_batched_emails()
                    self.assertEqual(
                        ScheduledMessageNotificationEmail.objects.count(), 0)

                    self.assert_length(info_logs.output, 2)
                    self.assertIn(
                        f"INFO:root:Batch-processing 3 missedmessage_emails events for user {hamlet.id}",
                        info_logs.output,
                    )
                    self.assertIn(
                        f"INFO:root:Batch-processing 1 missedmessage_emails events for user {othello.id}",
                        info_logs.output,
                    )

                    # All batches got processed. Verify that the timer isn't running.
                    self.assertEqual(mmw.timer_event, None)

                # Hacky test coming up! We want to test the try-except block in the consumer which handles
                # IntegrityErrors raised when the message was deleted before it processed the notification
                # event.
                # However, Postgres defers checking ForeignKey constraints to when the current transaction
                # commits. This poses some difficulties in testing because of Django running tests inside a
                # transaction which never commits. See https://code.djangoproject.com/ticket/22431 for more
                # details, but the summary is that IntegrityErrors due to database constraints are raised at
                # the end of the test, not inside the `try` block. So, we have the code inside the `try` block
                # raise `IntegrityError` by mocking.
                def raise_error(**kwargs: Any) -> None:
                    raise IntegrityError

                fake_client.enqueue("missedmessage_emails", hamlet_event1)

                with patch(
                        "zerver.models.ScheduledMessageNotificationEmail.objects.create",
                        side_effect=raise_error,
                ), self.assertLogs(level="DEBUG") as debug_logs:
                    mmw.start()
                    self.assertIn(
                        "DEBUG:root:ScheduledMessageNotificationEmail row could not be created. The message may have been deleted. Skipping event.",
                        debug_logs.output,
                    )

        # Check that the frequency of calling maybe_send_batched_emails is correct (5 seconds)
        self.assertEqual(tm.call_args[0][0], 5)

        # Verify the payloads now
        args = [c[0] for c in sm.call_args_list]
        arg_dict = {
            arg[0].id: dict(
                missed_messages=arg[1],
                count=arg[2],
            )
            for arg in args
        }

        hamlet_info = arg_dict[hamlet.id]
        self.assertEqual(hamlet_info["count"], 3)
        self.assertEqual(
            {m["message"].content
             for m in hamlet_info["missed_messages"]},
            {"hi hamlet", "goodbye hamlet", "hello again hamlet"},
        )

        othello_info = arg_dict[othello.id]
        self.assertEqual(othello_info["count"], 1)
        self.assertEqual(
            {m["message"].content
             for m in othello_info["missed_messages"]},
            {"where art thou, othello?"},
        )
Example #16
    def test_error_handling(self) -> None:
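        """Both QueueProcessingWorker and LoopQueueProcessingWorker should log
        the exception and write the failed event (or batch) to the queue's
        .errors file."""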
        processed = []

        @queue_processors.assign_queue('unreliable_worker', is_test_queue=True)
        class UnreliableWorker(queue_processors.QueueProcessingWorker):
            def consume(self, data: Mapping[str, Any]) -> None:
                if data["type"] == 'unexpected behaviour':
                    raise Exception('Worker task not performing as expected!')
                processed.append(data["type"])

        fake_client = self.FakeClient()
        for msg in ['good', 'fine', 'unexpected behaviour', 'back to normal']:
            fake_client.enqueue('unreliable_worker', {'type': msg})

        fn = os.path.join(settings.QUEUE_ERROR_DIR, 'unreliable_worker.errors')
        try:
            os.remove(fn)
        except OSError:  # nocoverage # error handling for the directory not existing
            pass

        with simulated_queue_client(lambda: fake_client):
            worker = UnreliableWorker()
            worker.setup()
            with patch('logging.exception') as logging_exception_mock:
                worker.start()
                logging_exception_mock.assert_called_once_with(
                    "Problem handling data on queue %s",
                    "unreliable_worker",
                    stack_info=True,
                )

        self.assertEqual(processed, ['good', 'fine', 'back to normal'])
        with open(fn) as f:
            line = f.readline().strip()
        events = orjson.loads(line.split('\t')[1])
        self.assert_length(events, 1)
        event = events[0]
        self.assertEqual(event["type"], 'unexpected behaviour')

        processed = []

        @queue_processors.assign_queue('unreliable_loopworker',
                                       is_test_queue=True)
        class UnreliableLoopWorker(queue_processors.LoopQueueProcessingWorker):
            def consume_batch(self, events: List[Dict[str, Any]]) -> None:
                for event in events:
                    if event["type"] == 'unexpected behaviour':
                        raise Exception(
                            'Worker task not performing as expected!')
                    processed.append(event["type"])

        for msg in ['good', 'fine', 'unexpected behaviour', 'back to normal']:
            fake_client.enqueue('unreliable_loopworker', {'type': msg})

        fn = os.path.join(settings.QUEUE_ERROR_DIR,
                          'unreliable_loopworker.errors')
        try:
            os.remove(fn)
        except OSError:  # nocoverage # error handling for the directory not existing
            pass

        with simulated_queue_client(lambda: fake_client):
            loopworker = UnreliableLoopWorker()
            loopworker.setup()
            with patch('logging.exception') as logging_exception_mock:
                loopworker.start()
                logging_exception_mock.assert_called_once_with(
                    "Problem handling data on queue %s",
                    "unreliable_loopworker",
                    stack_info=True,
                )

        self.assertEqual(processed, ['good', 'fine'])
        with open(fn) as f:
            line = f.readline().strip()
        events = orjson.loads(line.split('\t')[1])
        self.assert_length(events, 4)

        self.assertEqual(
            [event["type"] for event in events],
            ['good', 'fine', 'unexpected behaviour', 'back to normal'])
Example #17
    def test_missed_message_worker(self) -> None:
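        """missedmessage_emails events should be batched per user and handed to
        do_send_missedmessage_events_reply_in_zulip."""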
        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')
        othello = self.example_user('othello')

        hamlet1_msg_id = self.send_personal_message(
            from_email=cordelia.email,
            to_email=hamlet.email,
            content='hi hamlet',
        )

        hamlet2_msg_id = self.send_personal_message(
            from_email=cordelia.email,
            to_email=hamlet.email,
            content='goodbye hamlet',
        )

        othello_msg_id = self.send_personal_message(
            from_email=cordelia.email,
            to_email=othello.email,
            content='where art thou, othello?',
        )

        events = [
            dict(user_profile_id=hamlet.id, message_id=hamlet1_msg_id),
            dict(user_profile_id=hamlet.id, message_id=hamlet2_msg_id),
            dict(user_profile_id=othello.id, message_id=othello_msg_id),
        ]

        fake_client = self.FakeClient()
        for event in events:
            fake_client.queue.append(('missedmessage_emails', event))

        mmw = MissedMessageWorker()

        time_mock = patch(
            'zerver.worker.queue_processors.time.sleep',
            side_effect=AbortLoop,
        )

        send_mock = patch(
            'zerver.lib.notifications.do_send_missedmessage_events_reply_in_zulip'
        )

        with send_mock as sm, time_mock as tm:
            with simulated_queue_client(lambda: fake_client):
                try:
                    mmw.setup()
                    mmw.start()
                except AbortLoop:
                    pass

        self.assertEqual(tm.call_args[0][0], 120)  # should sleep two minutes

        args = [c[0] for c in sm.call_args_list]
        arg_dict = {
            arg[0].id: dict(
                missed_messages=arg[1],
                count=arg[2],
            )
            for arg in args
        }

        hamlet_info = arg_dict[hamlet.id]
        self.assertEqual(hamlet_info['count'], 2)
        self.assertEqual(
            {m.content
             for m in hamlet_info['missed_messages']},
            {'hi hamlet', 'goodbye hamlet'},
        )

        othello_info = arg_dict[othello.id]
        self.assertEqual(othello_info['count'], 1)
        self.assertEqual({m.content
                          for m in othello_info['missed_messages']},
                         {'where art thou, othello?'})
Example #18
    def test_error_handling(self) -> None:
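        """Failures in consume()/consume_batch() should be logged and the
        offending event (or batch) written to the queue's .errors file."""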
        processed = []

        @queue_processors.assign_queue("unreliable_worker", is_test_queue=True)
        class UnreliableWorker(queue_processors.QueueProcessingWorker):
            def consume(self, data: Mapping[str, Any]) -> None:
                if data["type"] == "unexpected behaviour":
                    raise Exception("Worker task not performing as expected!")
                processed.append(data["type"])

        fake_client = self.FakeClient()
        for msg in ["good", "fine", "unexpected behaviour", "back to normal"]:
            fake_client.enqueue("unreliable_worker", {"type": msg})

        fn = os.path.join(settings.QUEUE_ERROR_DIR, "unreliable_worker.errors")
        try:
            os.remove(fn)
        except OSError:  # nocoverage # error handling for the directory not existing
            pass

        with simulated_queue_client(lambda: fake_client):
            worker = UnreliableWorker()
            worker.setup()
            with self.assertLogs(level="ERROR") as m:
                worker.start()
                self.assertEqual(
                    m.records[0].message, "Problem handling data on queue unreliable_worker"
                )
                self.assertIn(m.records[0].stack_info, m.output[0])

        self.assertEqual(processed, ["good", "fine", "back to normal"])
        with open(fn) as f:
            line = f.readline().strip()
        events = orjson.loads(line.split("\t")[1])
        self.assert_length(events, 1)
        event = events[0]
        self.assertEqual(event["type"], "unexpected behaviour")

        processed = []

        @queue_processors.assign_queue("unreliable_loopworker", is_test_queue=True)
        class UnreliableLoopWorker(queue_processors.LoopQueueProcessingWorker):
            def consume_batch(self, events: List[Dict[str, Any]]) -> None:
                for event in events:
                    if event["type"] == "unexpected behaviour":
                        raise Exception("Worker task not performing as expected!")
                    processed.append(event["type"])

        for msg in ["good", "fine", "unexpected behaviour", "back to normal"]:
            fake_client.enqueue("unreliable_loopworker", {"type": msg})

        fn = os.path.join(settings.QUEUE_ERROR_DIR, "unreliable_loopworker.errors")
        try:
            os.remove(fn)
        except OSError:  # nocoverage # error handling for the directory not existing
            pass

        with simulated_queue_client(lambda: fake_client):
            loopworker = UnreliableLoopWorker()
            loopworker.setup()
            with self.assertLogs(level="ERROR") as m:
                loopworker.start()
                self.assertEqual(
                    m.records[0].message, "Problem handling data on queue unreliable_loopworker"
                )
                self.assertIn(m.records[0].stack_info, m.output[0])

        self.assertEqual(processed, ["good", "fine"])
        with open(fn) as f:
            line = f.readline().strip()
        events = orjson.loads(line.split("\t")[1])
        self.assert_length(events, 4)

        self.assertEqual(
            [event["type"] for event in events],
            ["good", "fine", "unexpected behaviour", "back to normal"],
        )
Example #19
    def test_mirror_worker_rate_limiting(self, mock_mirror_email: MagicMock,
                                         mock_warn: MagicMock) -> None:
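        """Mirrored stream emails should be rate limited per realm, while
        missed-message addresses bypass the limit; a
        RateLimiterLockingException is treated as hitting the rate limit."""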
        fake_client = self.FakeClient()
        realm = get_realm('zulip')
        RateLimitedRealmMirror(realm).clear_history()
        stream = get_stream('Denmark', realm)
        stream_to_address = encode_email_address(stream)
        data = [
            dict(
                msg_base64=base64.b64encode(b'\xf3test').decode(),
                time=time.time(),
                rcpt_to=stream_to_address,
            ),
        ] * 5
        for element in data:
            fake_client.enqueue('email_mirror', element)

        with simulated_queue_client(lambda: fake_client), \
                self.assertLogs('zerver.worker.queue_processors', level='WARNING') as warn_logs:
            start_time = time.time()
            with patch('time.time', return_value=start_time):
                worker = queue_processors.MirrorWorker()
                worker.setup()
                worker.start()
                # Of the first 5 messages, only 2 should be processed
                # (the rest being rate-limited):
                self.assertEqual(mock_mirror_email.call_count, 2)

                # If a new message is sent into the stream mirror, it will get rejected:
                fake_client.enqueue('email_mirror', data[0])
                worker.start()
                self.assertEqual(mock_mirror_email.call_count, 2)

                # However, missed message emails don't get rate limited:
                with self.settings(EMAIL_GATEWAY_PATTERN="*****@*****.**"):
                    address = 'mm' + ('x' * 32) + '@example.com'
                    event = dict(
                        msg_base64=base64.b64encode(b'\xf3test').decode(),
                        time=time.time(),
                        rcpt_to=address,
                    )
                    fake_client.enqueue('email_mirror', event)
                    worker.start()
                    self.assertEqual(mock_mirror_email.call_count, 3)

            # After some times passes, emails get accepted again:
            with patch('time.time', return_value=(start_time + 11.0)):
                fake_client.enqueue('email_mirror', data[0])
                worker.start()
                self.assertEqual(mock_mirror_email.call_count, 4)

                # If RateLimiterLockingException is thrown, we rate-limit the new message:
                with patch(
                        'zerver.lib.rate_limiter.RedisRateLimiterBackend.incr_ratelimit',
                        side_effect=RateLimiterLockingException):
                    fake_client.enqueue('email_mirror', data[0])
                    worker.start()
                    self.assertEqual(mock_mirror_email.call_count, 4)
                    mock_warn.assert_called_with(
                        "Deadlock trying to incr_ratelimit for %s",
                        f"RateLimitedRealmMirror:{realm.string_id}",
                    )
        self.assertEqual(warn_logs.output, [
            'WARNING:zerver.worker.queue_processors:MirrorWorker: Rejecting an email from: None to realm: Zulip Dev - rate limited.'
        ] * 5)
Example #20
    def test_push_notifications_worker(self) -> None:
        """
        The push notifications system has its own comprehensive test suite,
        so we can limit ourselves to simple unit testing the queue processor,
        without going deeper into the system - by mocking the handle_push_notification
        functions to immediately produce the effect we want, to test its handling by the queue
        processor.
        """
        fake_client = self.FakeClient()

        def fake_publish(
            queue_name: str, event: Dict[str, Any], processor: Callable[[Any], None]
        ) -> None:
            fake_client.enqueue(queue_name, event)

        def generate_new_message_notification() -> Dict[str, Any]:
            return build_offline_notification(1, 1)

        def generate_remove_notification() -> Dict[str, Any]:
            return {
                "type": "remove",
                "user_profile_id": 1,
                "message_ids": [1],
            }

        with simulated_queue_client(lambda: fake_client):
            worker = queue_processors.PushNotificationsWorker()
            worker.setup()
            with patch(
                "zerver.worker.queue_processors.handle_push_notification"
            ) as mock_handle_new, patch(
                "zerver.worker.queue_processors.handle_remove_push_notification"
            ) as mock_handle_remove, patch(
                "zerver.worker.queue_processors.initialize_push_notifications"
            ):
                event_new = generate_new_message_notification()
                event_remove = generate_remove_notification()
                fake_client.enqueue("missedmessage_mobile_notifications", event_new)
                fake_client.enqueue("missedmessage_mobile_notifications", event_remove)

                worker.start()
                mock_handle_new.assert_called_once_with(event_new["user_profile_id"], event_new)
                mock_handle_remove.assert_called_once_with(
                    event_remove["user_profile_id"], event_remove["message_ids"]
                )

            with patch(
                "zerver.worker.queue_processors.handle_push_notification",
                side_effect=PushNotificationBouncerRetryLaterError("test"),
            ) as mock_handle_new, patch(
                "zerver.worker.queue_processors.handle_remove_push_notification",
                side_effect=PushNotificationBouncerRetryLaterError("test"),
            ) as mock_handle_remove, patch(
                "zerver.worker.queue_processors.initialize_push_notifications"
            ):
                event_new = generate_new_message_notification()
                event_remove = generate_remove_notification()
                fake_client.enqueue("missedmessage_mobile_notifications", event_new)
                fake_client.enqueue("missedmessage_mobile_notifications", event_remove)

                with mock_queue_publish(
                    "zerver.lib.queue.queue_json_publish", side_effect=fake_publish
                ), self.assertLogs("zerver.worker.queue_processors", "WARNING") as warn_logs:
                    worker.start()
                    self.assertEqual(mock_handle_new.call_count, 1 + MAX_REQUEST_RETRIES)
                    self.assertEqual(mock_handle_remove.call_count, 1 + MAX_REQUEST_RETRIES)
                self.assertEqual(
                    warn_logs.output,
                    [
                        "WARNING:zerver.worker.queue_processors:Maximum retries exceeded for trigger:1 event:push_notification",
                    ]
                    * 2,
                )
Example #21
    def test_missed_message_worker(self) -> None:
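        """missedmessage_emails events should be batched per user and sent via
        do_send_missedmessage_events_reply_in_zulip when
        maybe_send_batched_emails fires."""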
        cordelia = self.example_user("cordelia")
        hamlet = self.example_user("hamlet")
        othello = self.example_user("othello")

        hamlet1_msg_id = self.send_personal_message(
            from_user=cordelia,
            to_user=hamlet,
            content="hi hamlet",
        )

        hamlet2_msg_id = self.send_personal_message(
            from_user=cordelia,
            to_user=hamlet,
            content="goodbye hamlet",
        )

        hamlet3_msg_id = self.send_personal_message(
            from_user=cordelia,
            to_user=hamlet,
            content="hello again hamlet",
        )

        othello_msg_id = self.send_personal_message(
            from_user=cordelia,
            to_user=othello,
            content="where art thou, othello?",
        )

        events = [
            dict(user_profile_id=hamlet.id, message_id=hamlet1_msg_id),
            dict(user_profile_id=hamlet.id, message_id=hamlet2_msg_id),
            dict(user_profile_id=othello.id, message_id=othello_msg_id),
        ]

        fake_client = self.FakeClient()
        for event in events:
            fake_client.enqueue("missedmessage_emails", event)

        mmw = MissedMessageWorker()

        class MockTimer:
            is_running = False

            def is_alive(self) -> bool:
                return self.is_running

            def start(self) -> None:
                self.is_running = True

        timer = MockTimer()
        timer_mock = patch(
            "zerver.worker.queue_processors.Timer",
            return_value=timer,
        )

        send_mock = patch(
            "zerver.lib.email_notifications.do_send_missedmessage_events_reply_in_zulip",
        )
        mmw.BATCH_DURATION = 0

        bonus_event = dict(user_profile_id=hamlet.id, message_id=hamlet3_msg_id)

        with send_mock as sm, timer_mock as tm:
            with simulated_queue_client(lambda: fake_client):
                self.assertFalse(timer.is_alive())
                mmw.setup()
                mmw.start()
                self.assertTrue(timer.is_alive())
                fake_client.enqueue("missedmessage_emails", bonus_event)

                # Double-calling start is our way to get it to run again
                self.assertTrue(timer.is_alive())
                mmw.start()
                with self.assertLogs(level="INFO") as info_logs:
                    # Now, we actually send the emails.
                    mmw.maybe_send_batched_emails()
                self.assertEqual(
                    info_logs.output,
                    [
                        "INFO:root:Batch-processing 3 missedmessage_emails events for user 10",
                        "INFO:root:Batch-processing 1 missedmessage_emails events for user 12",
                    ],
                )

                self.assertEqual(mmw.timer_event, None)

        self.assertEqual(tm.call_args[0][0], 5)  # Timer should be scheduled with a 5 second interval

        args = [c[0] for c in sm.call_args_list]
        arg_dict = {
            arg[0].id: dict(
                missed_messages=arg[1],
                count=arg[2],
            )
            for arg in args
        }

        hamlet_info = arg_dict[hamlet.id]
        self.assertEqual(hamlet_info["count"], 3)
        self.assertEqual(
            {m["message"].content for m in hamlet_info["missed_messages"]},
            {"hi hamlet", "goodbye hamlet", "hello again hamlet"},
        )

        othello_info = arg_dict[othello.id]
        self.assertEqual(othello_info["count"], 1)
        self.assertEqual(
            {m["message"].content for m in othello_info["missed_messages"]},
            {"where art thou, othello?"},
        )
Example #22
    def test_missed_message_worker(self) -> None:
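        """Events should be stored as ScheduledMessageNotificationEmail rows and
        only be processed once the BATCH_DURATION-based deadline has passed."""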
        cordelia = self.example_user("cordelia")
        hamlet = self.example_user("hamlet")
        othello = self.example_user("othello")

        hamlet1_msg_id = self.send_personal_message(
            from_user=cordelia,
            to_user=hamlet,
            content="hi hamlet",
        )

        hamlet2_msg_id = self.send_personal_message(
            from_user=cordelia,
            to_user=hamlet,
            content="goodbye hamlet",
        )

        hamlet3_msg_id = self.send_personal_message(
            from_user=cordelia,
            to_user=hamlet,
            content="hello again hamlet",
        )

        othello_msg_id = self.send_personal_message(
            from_user=cordelia,
            to_user=othello,
            content="where art thou, othello?",
        )

        hamlet_event1 = dict(
            user_profile_id=hamlet.id,
            message_id=hamlet1_msg_id,
            trigger=NotificationTriggers.PRIVATE_MESSAGE,
        )
        hamlet_event2 = dict(
            user_profile_id=hamlet.id,
            message_id=hamlet2_msg_id,
            trigger=NotificationTriggers.PRIVATE_MESSAGE,
            mentioned_user_group_id=4,
        )
        othello_event = dict(
            user_profile_id=othello.id,
            message_id=othello_msg_id,
            trigger=NotificationTriggers.PRIVATE_MESSAGE,
        )

        events = [hamlet_event1, hamlet_event2, othello_event]

        fake_client = self.FakeClient()
        for event in events:
            fake_client.enqueue("missedmessage_emails", event)

        mmw = MissedMessageWorker()
        batch_duration = datetime.timedelta(seconds=mmw.BATCH_DURATION)

        class MockTimer:
            is_running = False

            def is_alive(self) -> bool:
                return self.is_running

            def start(self) -> None:
                self.is_running = True

        timer = MockTimer()
        timer_mock = patch(
            "zerver.worker.queue_processors.Timer",
            return_value=timer,
        )

        send_mock = patch(
            "zerver.lib.email_notifications.do_send_missedmessage_events_reply_in_zulip",
        )

        bonus_event_hamlet = dict(
            user_profile_id=hamlet.id,
            message_id=hamlet3_msg_id,
            trigger=NotificationTriggers.PRIVATE_MESSAGE,
        )

        def check_row(
            row: ScheduledMessageNotificationEmail,
            scheduled_timestamp: datetime.datetime,
            mentioned_user_group_id: Optional[int],
        ) -> None:
            self.assertEqual(row.trigger, NotificationTriggers.PRIVATE_MESSAGE)
            self.assertEqual(row.scheduled_timestamp, scheduled_timestamp)
            self.assertEqual(row.mentioned_user_group_id,
                             mentioned_user_group_id)

        with send_mock as sm, timer_mock as tm:
            with simulated_queue_client(lambda: fake_client):
                self.assertFalse(timer.is_alive())

                time_zero = datetime.datetime(2021,
                                              1,
                                              1,
                                              tzinfo=datetime.timezone.utc)
                expected_scheduled_timestamp = time_zero + batch_duration
                with patch("zerver.worker.queue_processors.timezone_now",
                           return_value=time_zero):
                    mmw.setup()
                    mmw.start()

                    # The events should be saved in the database
                    hamlet_row1 = ScheduledMessageNotificationEmail.objects.get(
                        user_profile_id=hamlet.id, message_id=hamlet1_msg_id)
                    check_row(hamlet_row1, expected_scheduled_timestamp, None)

                    hamlet_row2 = ScheduledMessageNotificationEmail.objects.get(
                        user_profile_id=hamlet.id, message_id=hamlet2_msg_id)
                    check_row(hamlet_row2, expected_scheduled_timestamp, 4)

                    othello_row1 = ScheduledMessageNotificationEmail.objects.get(
                        user_profile_id=othello.id, message_id=othello_msg_id)
                    check_row(othello_row1, expected_scheduled_timestamp, None)

                    # Additionally, the timer should have been started
                    self.assertTrue(timer.is_alive())

                # If another event is received, test that it gets saved with the same
                # `expected_scheduled_timestamp` as the earlier events.
                fake_client.enqueue("missedmessage_emails", bonus_event_hamlet)
                self.assertTrue(timer.is_alive())
                few_moments_later = time_zero + datetime.timedelta(seconds=3)
                with patch("zerver.worker.queue_processors.timezone_now",
                           return_value=few_moments_later):
                    # Double-calling start is our way to get it to run again
                    mmw.start()
                    hamlet_row3 = ScheduledMessageNotificationEmail.objects.get(
                        user_profile_id=hamlet.id, message_id=hamlet3_msg_id)
                    check_row(hamlet_row3, expected_scheduled_timestamp, None)

                # Now let us test `maybe_send_batched_emails`
                # If called too early, it shouldn't process the emails.
                one_minute_premature = expected_scheduled_timestamp - datetime.timedelta(
                    seconds=60)
                with patch("zerver.worker.queue_processors.timezone_now",
                           return_value=one_minute_premature):
                    mmw.maybe_send_batched_emails()
                    self.assertEqual(
                        ScheduledMessageNotificationEmail.objects.count(), 4)

                # If called after `expected_scheduled_timestamp`, it should process all emails.
                one_minute_overdue = expected_scheduled_timestamp + datetime.timedelta(
                    seconds=60)
                with self.assertLogs(level="INFO") as info_logs, patch(
                        "zerver.worker.queue_processors.timezone_now",
                        return_value=one_minute_overdue):
                    mmw.maybe_send_batched_emails()
                    self.assertEqual(
                        ScheduledMessageNotificationEmail.objects.count(), 0)

                    self.assert_length(info_logs.output, 2)
                    self.assertIn(
                        f"INFO:root:Batch-processing 3 missedmessage_emails events for user {hamlet.id}",
                        info_logs.output,
                    )
                    self.assertIn(
                        f"INFO:root:Batch-processing 1 missedmessage_emails events for user {othello.id}",
                        info_logs.output,
                    )

                    # All batches got processed. Verify that the timer isn't running.
                    self.assertEqual(mmw.timer_event, None)

        # Check that the frequency of calling maybe_send_batched_emails is correct (5 seconds)
        self.assertEqual(tm.call_args[0][0], 5)

        # Verify the payloads now
        args = [c[0] for c in sm.call_args_list]
        arg_dict = {
            arg[0].id: dict(
                missed_messages=arg[1],
                count=arg[2],
            )
            for arg in args
        }

        hamlet_info = arg_dict[hamlet.id]
        self.assertEqual(hamlet_info["count"], 3)
        self.assertEqual(
            {m["message"].content
             for m in hamlet_info["missed_messages"]},
            {"hi hamlet", "goodbye hamlet", "hello again hamlet"},
        )

        othello_info = arg_dict[othello.id]
        self.assertEqual(othello_info["count"], 1)
        self.assertEqual(
            {m["message"].content
             for m in othello_info["missed_messages"]},
            {"where art thou, othello?"},
        )
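
The behaviour these assertions pin down can be summarised in a small, framework-free sketch: a user's first queued event opens a batching window, later events join that window without extending it, and only windows whose deadline has passed are flushed. The names below (BatchingCollector, add_event, flush_due) are illustrative only and are not part of the worker's API.

import datetime
from collections import defaultdict
from typing import Callable, Dict, List


class BatchingCollector:
    BATCH_DURATION = 120  # batch window in seconds; the value here is illustrative

    def __init__(self, now: Callable[[], datetime.datetime]) -> None:
        self.now = now
        self.pending: Dict[int, List[dict]] = defaultdict(list)
        self.deadline: Dict[int, datetime.datetime] = {}

    def add_event(self, event: dict) -> None:
        user_id = event["user_profile_id"]
        if user_id not in self.deadline:
            # The first event for a user opens the batching window.
            self.deadline[user_id] = self.now() + datetime.timedelta(
                seconds=self.BATCH_DURATION)
        # Later events join the existing window without extending it,
        # which is what the `bonus_event_hamlet` checks above assert.
        self.pending[user_id].append(event)

    def flush_due(self, send: Callable[[int, List[dict]], None]) -> None:
        # Counterpart of maybe_send_batched_emails: only batches whose
        # window has already elapsed are handed to `send`.
        for user_id in [u for u, ts in self.deadline.items() if ts <= self.now()]:
            send(user_id, self.pending.pop(user_id))
            del self.deadline[user_id]

Feeding this sketch the three hamlet events plus the bonus one and calling flush_due after the window reproduces the "3 events for hamlet, 1 for othello" split that the log assertions check.
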
Example #23
    def test_mirror_worker_rate_limiting(self, mock_mirror_email: MagicMock) -> None:
        fake_client = self.FakeClient()
        realm = get_realm("zulip")
        RateLimitedRealmMirror(realm).clear_history()
        stream = get_stream("Denmark", realm)
        stream_to_address = encode_email_address(stream)
        data = [
            dict(
                msg_base64=base64.b64encode(b"\xf3test").decode(),
                time=time.time(),
                rcpt_to=stream_to_address,
            ),
        ] * 5
        for element in data:
            fake_client.enqueue("email_mirror", element)

        with simulated_queue_client(lambda: fake_client), self.assertLogs(
            "zerver.worker.queue_processors", level="WARNING"
        ) as warn_logs:
            start_time = time.time()
            with patch("time.time", return_value=start_time):
                worker = queue_processors.MirrorWorker()
                worker.setup()
                worker.start()
                # Of the first 5 messages, only 2 should be processed
                # (the rest being rate-limited):
                self.assertEqual(mock_mirror_email.call_count, 2)

                # If a new message is sent into the stream mirror, it will get rejected:
                fake_client.enqueue("email_mirror", data[0])
                worker.start()
                self.assertEqual(mock_mirror_email.call_count, 2)

                # However, message notification emails don't get rate limited:
                with self.settings(EMAIL_GATEWAY_PATTERN="%s@example.com"):
                    address = "mm" + ("x" * 32) + "@example.com"
                    event = dict(
                        msg_base64=base64.b64encode(b"\xf3test").decode(),
                        time=time.time(),
                        rcpt_to=address,
                    )
                    fake_client.enqueue("email_mirror", event)
                    worker.start()
                    self.assertEqual(mock_mirror_email.call_count, 3)

            # After some time passes, emails get accepted again:
            with patch("time.time", return_value=(start_time + 11.0)):
                fake_client.enqueue("email_mirror", data[0])
                worker.start()
                self.assertEqual(mock_mirror_email.call_count, 4)

                # If RateLimiterLockingException is thrown, we rate-limit the new message:
                with patch(
                    "zerver.lib.rate_limiter.RedisRateLimiterBackend.incr_ratelimit",
                    side_effect=RateLimiterLockingException,
                ):
                    with self.assertLogs("zerver.lib.rate_limiter", "WARNING") as mock_warn:
                        fake_client.enqueue("email_mirror", data[0])
                        worker.start()
                        self.assertEqual(mock_mirror_email.call_count, 4)
                        self.assertEqual(
                            mock_warn.output,
                            [
                                "WARNING:zerver.lib.rate_limiter:Deadlock trying to incr_ratelimit for RateLimitedRealmMirror:zulip"
                            ],
                        )
        self.assertEqual(
            warn_logs.output,
            [
                "WARNING:zerver.worker.queue_processors:MirrorWorker: Rejecting an email from: None to realm: Zulip Dev - rate limited."
            ]
            * 5,
        )
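
For reference, here is a self-contained fixed-window limiter with the same observable behaviour the test relies on: a couple of accepted messages per window, the rest rejected, and acceptance again once the window has passed. The class name and the 10-second/2-message defaults are assumptions for illustration; the worker's real limits come from its realm-mirror rate-limit rules.

import time
from typing import Callable


class SimpleWindowRateLimiter:
    def __init__(self, window: float = 10.0, limit: int = 2,
                 clock: Callable[[], float] = time.time) -> None:
        self.window = window
        self.limit = limit
        self.clock = clock
        self.window_start = clock()
        self.count = 0

    def allow(self) -> bool:
        now = self.clock()
        if now - self.window_start >= self.window:
            # A full window has elapsed: start a fresh one.
            self.window_start = now
            self.count = 0
        if self.count >= self.limit:
            return False
        self.count += 1
        return True

With the clock pinned the way the test pins time.time, only the first two of five allow() calls return True, and a call made 11 seconds later is accepted again.
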
Example #24
    def test_missed_message_worker(self) -> None:
        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')
        othello = self.example_user('othello')

        hamlet1_msg_id = self.send_personal_message(
            from_email=cordelia.email,
            to_email=hamlet.email,
            content='hi hamlet',
        )

        hamlet2_msg_id = self.send_personal_message(
            from_email=cordelia.email,
            to_email=hamlet.email,
            content='goodbye hamlet',
        )

        hamlet3_msg_id = self.send_personal_message(
            from_email=cordelia.email,
            to_email=hamlet.email,
            content='hello again hamlet',
        )

        othello_msg_id = self.send_personal_message(
            from_email=cordelia.email,
            to_email=othello.email,
            content='where art thou, othello?',
        )

        events = [
            dict(user_profile_id=hamlet.id, message_id=hamlet1_msg_id),
            dict(user_profile_id=hamlet.id, message_id=hamlet2_msg_id),
            dict(user_profile_id=othello.id, message_id=othello_msg_id),
        ]

        fake_client = self.FakeClient()
        for event in events:
            fake_client.queue.append(('missedmessage_emails', event))

        mmw = MissedMessageWorker()

        class MockTimer():
            is_running = False

            def is_alive(self) -> bool:
                return self.is_running

            def start(self) -> None:
                self.is_running = True

            def cancel(self) -> None:
                self.is_running = False

        timer = MockTimer()
        time_mock = patch(
            'zerver.worker.queue_processors.Timer',
            return_value=timer,
        )

        send_mock = patch(
            'zerver.lib.notifications.do_send_missedmessage_events_reply_in_zulip'
        )
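        # With BATCH_DURATION zeroed out, every batch is immediately due,
        # so maybe_send_batched_emails() below flushes everything in one call.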
        mmw.BATCH_DURATION = 0

        bonus_event = dict(user_profile_id=hamlet.id, message_id=hamlet3_msg_id)

        with send_mock as sm, time_mock as tm:
            with simulated_queue_client(lambda: fake_client):
                self.assertFalse(timer.is_alive())
                mmw.setup()
                mmw.start()
                self.assertTrue(timer.is_alive())
                fake_client.queue.append(('missedmessage_emails', bonus_event))

                # Double-calling start is our way to get it to run again
                self.assertTrue(timer.is_alive())
                mmw.start()

                # Now, we actually send the emails.
                mmw.maybe_send_batched_emails()
                self.assertFalse(timer.is_alive())

        self.assertEqual(tm.call_args[0][0], 5)  # should sleep 5 seconds

        args = [c[0] for c in sm.call_args_list]
        arg_dict = {
            arg[0].id: dict(
                missed_messages=arg[1],
                count=arg[2],
            )
            for arg in args
        }

        hamlet_info = arg_dict[hamlet.id]
        self.assertEqual(hamlet_info['count'], 3)
        self.assertEqual(
            {m['message'].content for m in hamlet_info['missed_messages']},
            {'hi hamlet', 'goodbye hamlet', 'hello again hamlet'},
        )

        othello_info = arg_dict[othello.id]
        self.assertEqual(othello_info['count'], 1)
        self.assertEqual(
            {m['message'].content for m in othello_info['missed_messages']},
            {'where art thou, othello?'}
        )
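
Both versions of this test rely on the same trick for taming threading.Timer: substitute a hand-driven stand-in so no background thread ever starts, then fire the scheduled callback from the test itself. Below is a reusable sketch of that test double; the class name and the demonstration at the end are illustrative, not part of the worker code.

from typing import Any, Callable, Iterable, Mapping, Optional


class FakeTimer:
    """Stands in for threading.Timer: records the scheduled call
    instead of starting a real background thread."""

    def __init__(self, interval: float, function: Callable[..., Any],
                 args: Iterable[Any] = (),
                 kwargs: Optional[Mapping[str, Any]] = None) -> None:
        self.interval = interval
        self.function = function
        self.args = list(args)
        self.kwargs = dict(kwargs or {})
        self.started = False

    def start(self) -> None:
        self.started = True

    def cancel(self) -> None:
        self.started = False

    def is_alive(self) -> bool:
        return self.started

    def fire(self) -> None:
        # The test, not a thread, decides when the callback runs.
        self.function(*self.args, **self.kwargs)


# Minimal demonstration, independent of any worker:
calls = []
timer = FakeTimer(5, calls.append, args=("fired",))
timer.start()
assert timer.is_alive()
timer.fire()
assert calls == ["fired"]
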