def test_parse(self):
    p = self._payload([
        self._digest([
            self._course([
                self._thread("t00", [self._item("a"), self._item("b"), self._item("c")]),
                self._thread("t01", [self._item("d"), self._item("e"), self._item("f")]),
                self._thread("t02", [self._item("g"), self._item("h"), self._item("i")]),
            ]),
            self._course([
                self._thread("t10", [self._item("j"), self._item("k"), self._item("l")]),
                self._thread("t11", [self._item("m"), self._item("n"), self._item("o")]),
                self._thread("t12", [self._item("p"), self._item("q"), self._item("r")]),
            ]),
        ]),
        self._digest([
            self._course([
                self._thread("t20", [self._item("A"), self._item("B"), self._item("C")]),
                self._thread("t21", [self._item("D"), self._item("E"), self._item("F")]),
                self._thread("t22", [self._item("G"), self._item("H"), self._item("I")]),
            ]),
            self._course([
                self._thread("t30", [self._item("J"), self._item("K"), self._item("L")]),
                self._thread("t31", [self._item("M"), self._item("N"), self._item("O")]),
                self._thread("t32", [self._item("P"), self._item("Q"), self._item("R")]),
            ]),
        ]),
    ])
    digest_count = 0
    for user_id, parsed_digest in Parser.parse(p):
        # self._check_user(user_id, u, Parser.user(user_id, u))
        self.assertIsNotNone(self._find_raw_digest(parsed_digest, p))
        digest_count += 1
    self.assertEqual(digest_count, len(p))
def test_generate_and_send_digests_retry_limit(self):
    """
    The task should retry on SES throttling errors and give up once the
    configured retry limit is exhausted.
    """
    data = json.load(
        open(join(dirname(__file__), 'cs_notifications.result.json')))
    with patch('notifier.tasks.generate_digest_content',
               return_value=list(Parser.parse(data))) as p:
        # setting this here because override_settings doesn't seem to
        # work on celery task configuration decorators
        expected_num_tries = 1 + settings.FORUM_DIGEST_TASK_MAX_RETRIES
        mock_backend = Mock(
            name='mock_backend',
            send_messages=Mock(
                side_effect=SESMaxSendingRateExceededError(400, 'Throttling')))
        with patch('notifier.connection_wrapper.dj_get_connection',
                   return_value=mock_backend) as p2:
            # execute task - should fail, retry up to the configured
            # maximum and still fail, then give up
            try:
                task_result = generate_and_send_digests.delay(
                    [usern(n) for n in xrange(2, 11)],
                    datetime.datetime.now(), datetime.datetime.now())
            except SESMaxSendingRateExceededError as e:
                self.assertEqual(
                    mock_backend.send_messages.call_count,
                    expected_num_tries)
            else:
                # should have raised
                self.fail('task did not retry before giving up')
def test_course_simple(self):
    c = self._course([
        self._thread("t0", [self._item("a"), self._item("b"), self._item("c")]),
        self._thread("t1", [self._item("d"), self._item("e"), self._item("f")]),
        self._thread("t2", [self._item("g"), self._item("h"), self._item("i")]),
    ])
    self._check_course("some_course_id", c, Parser.course("some_course_id", c))
def test_digest_simple(self):
    d = self._digest([
        self._course([
            self._thread("t00", [self._item("a"), self._item("b"), self._item("c")]),
            self._thread("t01", [self._item("d"), self._item("e"), self._item("f")]),
            self._thread("t02", [self._item("g"), self._item("h"), self._item("i")]),
        ]),
        self._course([
            self._thread("t10", [self._item("j"), self._item("k"), self._item("l")]),
            self._thread("t11", [self._item("m"), self._item("n"), self._item("o")]),
            self._thread("t12", [self._item("p"), self._item("q"), self._item("r")]),
        ]),
    ])
    self._check_digest("some_user_id", d, Parser.digest("some_user_id", d))
def test_generate_and_send_digests_rewrite_recipient(self):
    """
    Every outgoing message should have its recipient address rewritten
    to the configured override address.
    """
    data = json.load(
        open(join(dirname(__file__), 'cs_notifications.result.json')))
    with patch('notifier.tasks.generate_digest_content',
               return_value=Parser.parse(data)) as p:
        # execute task
        task_result = generate_and_send_digests.delay(
            (usern(n) for n in xrange(2, 11)),
            datetime.datetime.now(), datetime.datetime.now())
        self.assertTrue(task_result.successful())
        # all messages were sent
        self.assertTrue(hasattr(djmail, 'outbox'))
        self.assertEqual(9, len(djmail.outbox))
        # all messages' email addresses were rewritten
        for message in djmail.outbox:
            self.assertEqual(message.to, ['*****@*****.**'])
def test_generate_and_send_digests(self):
    """
    A single-user digest should result in exactly one email with the
    expected recipient, sender, subject, and content.
    """
    data = json.load(
        open(join(dirname(__file__), 'cs_notifications.result.json')))
    user_id, digest = Parser.parse(data).next()
    user = usern(10)
    with patch('notifier.tasks.generate_digest_content',
               return_value=[(user_id, digest)]) as p:
        # execute task
        task_result = generate_and_send_digests.delay(
            [user], datetime.datetime.now(), datetime.datetime.now())
        self.assertTrue(task_result.successful())
        # message was sent
        self.assertTrue(hasattr(djmail, 'outbox'))
        self.assertEqual(1, len(djmail.outbox))
        # message has expected to, from, subj, and content
        self._check_message(user, digest, djmail.outbox[0])
def test_thread_simple(self):
    t = self._thread("t", [self._item("a"), self._item("b"), self._item("c")])
    self._check_thread("some_thread_id", "some_course_id", t,
                       Parser.thread('some_thread_id', 'some_course_id', t))
def test_item_simple(self):
    i = self._item("a")
    self._check_item(i, Parser.item(i))