Beispiel #1
0
def generate_and_send_digests(users, from_dt, to_dt):
    """
    Generate and send forum digest emails to a batch of users in a single
    background operation.

    `users` is an iterable of dictionaries, as returned by the edx user_api
    (required keys are "id", "name", "username", and "email").

    `from_dt` and `to_dt` are datetime objects bounding the time window for
    which digest content is generated.
    """
    users_by_id = {str(u['id']): u for u in users}

    def _build_message(recipient, digest):
        # render both bodies and wrap them in a multipart email
        text, html = render_digest(
            recipient, digest, settings.FORUM_DIGEST_EMAIL_TITLE,
            settings.FORUM_DIGEST_EMAIL_DESCRIPTION)
        sender_address = '@'.join(
            [settings.FORUM_DIGEST_EMAIL_SENDER, settings.EMAIL_DOMAIN])
        message = EmailMultiAlternatives(
            settings.FORUM_DIGEST_EMAIL_SUBJECT, text, sender_address,
            [recipient['email']])
        message.attach_alternative(html, "text/html")
        return message

    with closing(get_connection()) as connection:
        messages = [
            _build_message(users_by_id[user_id], digest)
            for user_id, digest in generate_digest_content(
                users_by_id.keys(), from_dt, to_dt)
        ]
        if not messages:
            return
        try:
            connection.send_messages(messages)
        except SESMaxSendingRateExceededError as e:
            # we've tripped the SES per-second send rate limit.  we generally
            # rely on the django_ses auto throttle to prevent this, but in
            # case we creep over, re-queue and re-try this task - if and only
            # if none of the messages in our batch were sent yet.  (on retry
            # the data is fetched from the comments service again.)
            delivered = any(
                getattr(m, 'extra_headers', {}).get('status') == 200
                for m in messages)
            if delivered:
                # raise right away, since we don't support partial retry
                raise
            raise generate_and_send_digests.retry(exc=e)
Beispiel #2
0
def generate_and_send_digests(users, from_dt, to_dt):
    """
    Generate and send forum digest emails to a batch of users in a single
    background operation.

    `users` is an iterable of dictionaries, as returned by the edx user_api
    (required keys are "id", "name", "username", and "email").

    `from_dt` and `to_dt` are datetime objects bounding the time window for
    which digest content is generated.
    """
    users_by_id = {str(u['id']): u for u in users}
    with closing(get_connection()) as connection:
        outbound = []
        content = generate_digest_content(users_by_id.keys(), from_dt, to_dt)
        for user_id, digest in content:
            recipient = users_by_id[user_id]
            # render both bodies for this user's digest
            plain, html = render_digest(
                recipient, digest,
                settings.FORUM_DIGEST_EMAIL_TITLE,
                settings.FORUM_DIGEST_EMAIL_DESCRIPTION)
            email = EmailMultiAlternatives(
                settings.FORUM_DIGEST_EMAIL_SUBJECT, plain,
                settings.FORUM_DIGEST_EMAIL_SENDER,
                [recipient['email']])
            email.attach_alternative(html, "text/html")
            outbound.append(email)
        if not outbound:
            return
        try:
            connection.send_messages(outbound)
        except SESMaxSendingRateExceededError as e:
            # SES per-second rate limit tripped.  django_ses auto throttle
            # normally prevents this; if we creep over anyway, re-queue the
            # task - but only when nothing from this batch was sent yet.
            # (a retry re-fetches the data from the comments service.)
            delivered = [m for m in outbound
                         if getattr(m, 'extra_headers', {}).get('status') == 200]
            if delivered:
                # raise right away, since we don't support partial retry
                raise
            raise generate_and_send_digests.retry(exc=e)
Beispiel #3
0
 def test_generate_digest_content(self):
     """
     Verify that an empty comments-service response produces no digests,
     and that the service endpoint is called exactly once with the
     expected URL, headers, and post data.
     """
     # empty result
     from_dt = datetime.datetime(2013, 1, 1)
     to_dt = datetime.datetime(2013, 1, 2)
     # NOTE(review): Mock(json={}) mimics an older requests API in which
     # `response.json` was an attribute rather than a method -- confirm
     # against the requests version this project pins.
     with patch('requests.post', return_value=Mock(json={})) as p:         
         g = generate_digest_content(["a", "b", "c"], from_dt, to_dt)
         expected_api_url = '*test_cs_url*/api/v1/notifications'
         expected_headers = {
             'X-Edx-Api-Key': '*test_cs_key*',
         }
         expected_post_data = {
             'user_ids': 'a,b,c',
             'from': '2013-01-01 00:00:00', # TODO tz offset
             'to': '2013-01-02 00:00:00'
         }
         p.assert_called_once_with(expected_api_url, headers=expected_headers, data=expected_post_data)            
         # the generator must be exhausted immediately (py2 generator API)
         self.assertRaises(StopIteration, g.next)
Beispiel #4
0
 def test_empty_response(self):
     """
     An empty comments-service payload must yield no digests; the service
     must still be called exactly once with the expected URL, headers,
     and post data.
     """
     mock_response = make_mock_json_response()
     with patch('requests.post', return_value=mock_response) as p:
         # the dict's keys are the user ids forwarded to the service
         g = generate_digest_content(
             {"a": {}, "b": {}, "c": {}},
             self.from_dt,
             self.to_dt
         )
         expected_api_url = '*test_cs_url*/api/v1/notifications'
         expected_headers = {
             'X-Edx-Api-Key': '*test_cs_key*',
         }
         expected_post_data = {
             'user_ids': 'a,b,c',
             'from': '2013-01-01 00:00:00',  # TODO tz offset
             'to': '2013-01-02 00:00:00'
         }
         p.assert_called_once_with(expected_api_url, headers=expected_headers, data=expected_post_data)            
         # the generator must be exhausted immediately (py2 generator API)
         self.assertRaises(StopIteration, g.next)
Beispiel #5
0
def generate_and_send_digests(users, from_dt, to_dt):
    """
    Generate and send forum digest emails to a batch of users in a single
    background operation.

    `users` is an iterable of dictionaries, as returned by the edx user_api
    (required keys are "id", "name", "email", "preferences", and
    "course_info").

    `from_dt` and `to_dt` are datetime objects bounding the time window for
    which digest content is generated.
    """
    logger.info("DIGEST TASK UPLOAD")
    users_by_id = {str(u['id']): u for u in users}
    pending = []
    try:
        with closing(get_connection()) as connection:
            for user_id, digest in generate_digest_content(users_by_id,
                                                           from_dt, to_dt):
                recipient = users_by_id[user_id]
                # render both bodies for this user's digest
                plain, html = render_digest(
                    recipient, digest,
                    settings.FORUM_DIGEST_EMAIL_TITLE,
                    settings.FORUM_DIGEST_EMAIL_DESCRIPTION)
                email = EmailMultiAlternatives(
                    settings.FORUM_DIGEST_EMAIL_SUBJECT, plain,
                    settings.FORUM_DIGEST_EMAIL_SENDER,
                    [recipient['email']])
                email.attach_alternative(html, "text/html")
                pending.append(email)
            if pending:
                connection.send_messages(pending)
            if settings.DEAD_MANS_SNITCH_URL:
                # check in with the dead man's snitch after a successful run
                requests.post(settings.DEAD_MANS_SNITCH_URL)
    except (CommentsServiceException, SESMaxSendingRateExceededError) as e:
        sent_any = any(getattr(m, 'extra_headers', {}).get('status') == 200
                       for m in pending)
        if sent_any:
            # raise right away, since we don't support partial retry
            raise
        # only retry when no messages were successfully sent yet
        raise generate_and_send_digests.retry(exc=e)
Beispiel #6
0
    def show_rendered(self, fmt, users, from_dt, to_dt):
        """
        Render the first digest generated for `users` and print its `fmt`
        ('text' or 'html') body to stdout.
        """
        users_by_id = dict((str(u['id']), u) for u in users)

        digests = generate_digest_content(users_by_id, from_dt, to_dt)
        try:
            user_id, digest = digests.next()
        except StopIteration:
            logger.warning('could not show rendered %s: %s', fmt,
                           'no digests found')
            return

        text, html = render_digest(
            users_by_id[user_id], digest,
            settings.FORUM_DIGEST_EMAIL_TITLE,
            settings.FORUM_DIGEST_EMAIL_DESCRIPTION)
        rendered = {'text': text, 'html': html}
        if fmt in rendered:
            print >> self.stdout, rendered[fmt]
Beispiel #7
0
def generate_and_send_digests(users, from_dt, to_dt, language=None):
    """
    Generate and send forum digest emails to a batch of users in a single
    background operation.

    `users` is an iterable of dictionaries, as returned by the edx user_api
    (required keys are "id", "name", "email", "preferences", and
    "course_info").

    `from_dt` and `to_dt` are datetime objects bounding the time window for
    which digest content is generated.  `language`, when provided, overrides
    the configured rendering language for this run.
    """
    # pin the rendering language for this task run
    settings.LANGUAGE_CODE = language or settings.LANGUAGE_CODE or DEFAULT_LANGUAGE
    users_by_id = {str(u['id']): u for u in users}
    outbound = []
    try:
        with closing(get_connection()) as connection:
            for user_id, digest in generate_digest_content(users_by_id,
                                                           from_dt, to_dt):
                recipient = users_by_id[user_id]
                # render both bodies for this user's digest
                plain, html = render_digest(
                    recipient, digest,
                    settings.FORUM_DIGEST_EMAIL_TITLE,
                    settings.FORUM_DIGEST_EMAIL_DESCRIPTION)
                email = EmailMultiAlternatives(
                    settings.FORUM_DIGEST_EMAIL_SUBJECT, plain,
                    settings.FORUM_DIGEST_EMAIL_SENDER,
                    [recipient['email']])
                email.attach_alternative(html, "text/html")
                outbound.append(email)
            if outbound:
                connection.send_messages(outbound)
            if settings.DEAD_MANS_SNITCH_URL:
                # check in with the dead man's snitch after a successful run
                requests.post(settings.DEAD_MANS_SNITCH_URL)
    except (CommentsServiceException, SESMaxSendingRateExceededError) as e:
        delivered = any(getattr(m, 'extra_headers', {}).get('status') == 200
                        for m in outbound)
        if delivered:
            # raise right away, since we don't support partial retry
            raise
        # only retry when no messages were successfully sent yet
        raise generate_and_send_digests.retry(exc=e)
Beispiel #8
0
    def show_rendered(self, fmt, users, from_dt, to_dt):
        """
        Render the first digest generated for `users` and print its `fmt`
        ('text' or 'html') body to stdout.
        """
        users_by_id = dict((str(u['id']), u) for u in users)

        content = generate_digest_content(users_by_id, from_dt, to_dt)
        try:
            user_id, digest = next(content)
        except StopIteration:
            logger.warning('could not show rendered %s: %s', fmt,
                           'no digests found')
            return

        text, html = render_digest(
            users_by_id[user_id], digest,
            settings.FORUM_DIGEST_EMAIL_TITLE,
            settings.FORUM_DIGEST_EMAIL_DESCRIPTION)
        bodies = {'text': text, 'html': html}
        if fmt in bodies:
            print >> self.stdout, bodies[fmt]
Beispiel #9
0
 def show_content(self, users, from_dt, to_dt):
     """Dump all generated digest content for `users` to stdout as JSON."""
     users_by_id = {str(u['id']): u for u in users}
     digests = list(generate_digest_content(users_by_id, from_dt, to_dt))
     # DigestJSONEncoder handles datetime objects, which the builtin
     # encoder cannot serialize
     json.dump(digests, self.stdout, cls=DigestJSONEncoder)
Beispiel #10
0
    def test_cohort_filtering(self):
        """
        Test the generate_digest_content correctly filters digests according to user access to the threads.

        Every user receives the same raw payload from the (mocked) comments
        service; any per-user differences in the result must therefore come
        from generate_digest_content's cohort filtering.
        """
        gid_1 = 1
        gid_2 = 2
        # a group to which none of the test users belong
        gid_nousers = 99
        # a group in which none of the test threads exist
        gid_nothreads = 1001

        # Create a mock user information dict as would be returned from the user service (LMS).
        # Each entry carries the user's course access info plus the courses
        # and thread titles we expect to survive filtering for that user.
        users_by_id = {
            "moderator": {
                "course_info": {
                    # This test file uses both currently known forms of course id strings to ensure
                    # that notifiers makes no assumptions about course key types. course-v1 is one
                    "course-v1:org+cohorted-course+run": {"see_all_cohorts": True, "cohort_id": None},
                    "course-v1:org+non-cohorted-course+run": {"see_all_cohorts": True, "cohort_id": None},
                },
                "expected_courses": ["course-v1:org+cohorted-course+run", "course-v1:org+non-cohorted-course+run"],
                "expected_threads": [
                    "group1-t01", "group2-t02", "all-groups-t03", "no-group-t11", "old-group-t12"
                ],
            },
            "group1_user": {
                "course_info": {
                    "course-v1:org+cohorted-course+run": {"see_all_cohorts": False, "cohort_id": gid_1},
                    "course-v1:org+non-cohorted-course+run": {"see_all_cohorts": True, "cohort_id": None},
                },
                "expected_courses": ["course-v1:org+cohorted-course+run", "course-v1:org+non-cohorted-course+run"],
                "expected_threads": ["group1-t01", "all-groups-t03", "no-group-t11", "old-group-t12"],
            },
            "group2_user": {
                "course_info": {
                    "course-v1:org+cohorted-course+run": {"see_all_cohorts": False, "cohort_id": gid_2},
                    "course-v1:org+non-cohorted-course+run": {"see_all_cohorts": True, "cohort_id": gid_nothreads},
                },
                "expected_courses": ["course-v1:org+cohorted-course+run", "course-v1:org+non-cohorted-course+run"],
                "expected_threads": ["group2-t02", "all-groups-t03", "no-group-t11", "old-group-t12"],
            },
            "unassigned_user": {
                "course_info": {
                    "course-v1:org+cohorted-course+run": {"see_all_cohorts": False, "cohort_id": None},
                    "course-v1:org+non-cohorted-course+run": {"see_all_cohorts": True, "cohort_id": None},
                },
                "expected_courses": ["course-v1:org+cohorted-course+run", "course-v1:org+non-cohorted-course+run"],
                "expected_threads": ["all-groups-t03", "no-group-t11", "old-group-t12"],
            },
            "unenrolled_user": {  # should receive no digest because not enrolled in any courses
                "course_info": {},
                "expected_courses": [],
                "expected_threads": [],
            },
            "one_course_empty_user": {
                "course_info": {
                    "course-v1:org+cohorted-course+run": {"see_all_cohorts": False, "cohort_id": gid_2},
                    "course-v1:all+cohorted-course+run": {"see_all_cohorts": False, "cohort_id": gid_nothreads},
                },
                "expected_courses": ["course-v1:org+cohorted-course+run"],
                "expected_threads": ["group2-t02", "all-groups-t03"],
            },
            "all_courses_empty_user": {  # should not get any digest, because group filter kicks in
                "course_info": {
                    "course-v1:all+cohorted-course+run": {"see_all_cohorts": False, "cohort_id": gid_nothreads},
                },
                "expected_courses": [],
                "expected_threads": [],
            },
        }
        user_ids = users_by_id.keys()

        # Create a mock payload with digest information as would be returned by the comments service.
        # Note: it is identical for every user; the per-user filtering is what is under test.
        payload = {
            user_id: {
                "course-v1:org+cohorted-course+run": self._course([
                    self._thread("group1-t01", [self._item("a1"), self._item("b1"), self._item("c1")], gid_1),
                    self._thread("group2-t02", [self._item("a2"), self._item("b2"), self._item("c2")], gid_2),
                    self._thread("all-groups-t03", [self._item("a3"), self._item("b3"), self._item("c3")], None),
                ]),
                "course-v1:org+non-cohorted-course+run": self._course([
                    self._thread("no-group-t11", [self._item("a3"), self._item("b3"), self._item("c3")], None),
                    self._thread("old-group-t12", [self._item("a3"), self._item("b3"), self._item("c3")], gid_nousers),
                ]),
                # none of these thread titles appear in any user's expected_threads
                "course-v1:all+cohorted-course+run": self._course([
                    self._thread("groupX-t01", [self._item("x")], gid_1),
                    self._thread("groupX-t01", [self._item("x")], gid_nousers),
                ]),
            }
            for user_id in user_ids
        }

        # Verify the notifier's generate_digest_content method correctly filters digests as expected.
        mock_response = make_mock_json_response(json=payload)
        with patch('requests.post', return_value=mock_response):
            filtered_digests = list(generate_digest_content(users_by_id, self.from_dt, self.to_dt))

            # Make sure the number of digests equals the number of users.
            # Otherwise, it's possible the guts of the for loop below never gets called.
            self.assertEquals(
                len(filtered_digests),
                len(filter(lambda u: len(u["expected_threads"]) > 0, users_by_id.values()))
            )

            # Verify the returned digests are as expected for each user.
            for user_id, digest in filtered_digests:
                self.assertSetEqual(
                    set(users_by_id[user_id]["expected_courses"]),
                    set([c.course_id for c in digest.courses]),
                    "Set of returned digest courses does not equal expected results"
                )

                thread_titles = [t.title for t in itertools.chain(*(c.threads for c in digest.courses))]
                self.assertSetEqual(
                    set(users_by_id[user_id]["expected_threads"]),
                    set(thread_titles),
                    "Set of returned digest threads does not equal expected results"
                )
Beispiel #11
0
 def show_content(self, users, from_dt, to_dt):
     """Dump all generated digest content for `users` to stdout as JSON."""
     users_by_id = {str(u['id']): u for u in users}
     digests = list(generate_digest_content(users_by_id, from_dt, to_dt))
     # DigestJSONEncoder handles datetime objects, which the builtin
     # encoder cannot serialize
     json.dump(digests, self.stdout, cls=DigestJSONEncoder)
Beispiel #12
0
class Command(BaseCommand):

    """
    Diagnostic management command for forum digests.

    Options allow dumping the users that would receive digests, the raw
    digest content fetched from the comments service, or a single rendered
    digest body (text or html), instead of actually sending email.
    """

    option_list = BaseCommand.option_list + (
        make_option('--to_datetime',
                    action='store',
                    dest='to_datetime',
                    default=None,
                    help='datetime as of which to generate digest content, in ISO-8601 format (UTC).  Defaults to today at midnight (UTC).'),
        make_option('--minutes',
                    action='store',
                    dest='minutes',
                    type='int',
                    default=1440,
                    help='number of minutes up to TO_DATETIME for which to generate digest content.  Defaults to 1440 (one day).'),
        make_option('--users',
                    action='store',
                    dest='users_str',
                    default=None,
                    help='send digests for the specified users only (regardless of opt-out settings!)'),
        make_option('--show-content',
                    action='store_true',
                    dest='show_content',
                    default=None,
                    help='output the retrieved content only (don\'t send anything)'),
        make_option('--show-users',
                    action='store_true',
                    dest='show_users',
                    default=None,
                    help='output the retrieved users only (don\'t fetch content or send anything)'),
        make_option('--show-text',
                    action='store_true',
                    dest='show_text',
                    default=None,
                    help='output the rendered text body of the first user-digest generated, and exit (don\'t send anything)'),
        make_option('--show-html',
                    action='store_true',
                    dest='show_html',
                    default=None,
                    help='output the rendered html body of the first user-digest generated, and exit (don\'t send anything)'),
    )

    def get_specific_users(self, user_ids):
        """
        Fetch full user dicts for the given ids, silently skipping ids
        that cannot be resolved.
        """
        # this makes an individual HTTP request for each user -
        # it is only intended for use with small numbers of users
        # (e.g. for diagnostic purposes).
        users = []
        for user_id in user_ids:
            user = get_user(user_id)
            if user:
                users.append(user)
        return users

    def show_users(self, users):
        """Dump the raw user dicts to stdout as JSON."""
        json.dump(list(users), self.stdout)

    def show_content(self, users, from_dt, to_dt):
        """Dump all digest content for `users` to stdout as JSON."""
        all_content = generate_digest_content(
            (u['id'] for u in users), from_dt, to_dt)
        # use django's encoder; builtin one doesn't handle datetime objects
        json.dump(list(all_content), self.stdout, cls=DigestJSONEncoder)

    def show_rendered(self, fmt, users, from_dt, to_dt):
        """
        Render the first digest generated for the first user in `users` and
        print its `fmt` ('text' or 'html') body to stdout.
        """

        def _fail(msg):
            logger.warning('could not show rendered %s: %s', fmt, msg)

        try:
            user = list(users)[0]
        except IndexError:
            _fail('no users found')
            return

        try:
            # use the next() builtin rather than the py2-only gen.next()
            # method; behavior is identical and the syntax is forward-
            # compatible
            user_id, digest = next(generate_digest_content(
                [user['id']], from_dt, to_dt))
        except StopIteration:
            _fail('no digests found')
            return

        text, html = render_digest(
            user, digest, settings.FORUM_DIGEST_EMAIL_TITLE, settings.FORUM_DIGEST_EMAIL_DESCRIPTION)
        if fmt == 'text':
            print >> self.stdout, text
        elif fmt == 'html':
            print >> self.stdout, html
Beispiel #13
0
 def show_content(self, users, from_dt, to_dt):
     """Dump all generated digest content for `users` to stdout as JSON."""
     digests = list(generate_digest_content(
         [u['id'] for u in users], from_dt, to_dt))
     # DigestJSONEncoder handles datetime objects, which the builtin
     # encoder cannot serialize
     json.dump(digests, self.stdout, cls=DigestJSONEncoder)