Example #1
def gather_hot_conversations(user_profile: UserProfile, stream_messages: QuerySet) -> List[Dict[str, Any]]:
    # Gather stream conversations of 2 types:
    # 1. long conversations
    # 2. conversations where many different people participated
    #
    # Returns a list of dictionaries containing the templating
    # information for each hot conversation.

    conversation_length = defaultdict(int)  # type: Dict[Tuple[int, Text], int]
    conversation_diversity = defaultdict(set)  # type: Dict[Tuple[int, Text], Set[Text]]
    for user_message in stream_messages:
        if not user_message.message.sent_by_human():
            # Don't include automated messages in the count.
            continue

        key = (user_message.message.recipient.type_id,
               user_message.message.subject)
        conversation_diversity[key].add(
            user_message.message.sender.full_name)
        conversation_length[key] += 1

    diversity_list = list(conversation_diversity.items())
    diversity_list.sort(key=lambda entry: len(entry[1]), reverse=True)

    length_list = list(conversation_length.items())
    length_list.sort(key=lambda entry: entry[1], reverse=True)

    # Get up to the 4 best conversations from the diversity list
    # and length list, filtering out overlapping conversations.
    hot_conversations = [elt[0] for elt in diversity_list[:2]]
    for candidate, _ in length_list:
        if candidate not in hot_conversations:
            hot_conversations.append(candidate)
        if len(hot_conversations) >= 4:
            break

    # There was so much overlap between the diversity and length lists that we
    # still have < 4 conversations. Try to use remaining diversity items to pad
    # out the hot conversations.
    num_convos = len(hot_conversations)
    if num_convos < 4:
        hot_conversations.extend([elt[0] for elt in diversity_list[num_convos:4]])

    hot_conversation_render_payloads = []
    for h in hot_conversations:
        stream_id, subject = h
        users = list(conversation_diversity[h])
        count = conversation_length[h]

        # We'll display up to 2 messages from the conversation.
        first_few_messages = [user_message.message for user_message in
                              stream_messages.filter(
                                  message__recipient__type_id=stream_id,
                                  message__subject=subject)[:2]]

        teaser_data = {"participants": users,
                       "count": count - len(first_few_messages),
                       "first_few_messages": build_message_list(
                           user_profile, first_few_messages)}

        hot_conversation_render_payloads.append(teaser_data)
    return hot_conversation_render_payloads
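
The core of this function is the two-pass selection over the diversity and length rankings. The following standalone sketch reproduces that selection on hypothetical toy data (plain dicts keyed by (stream_id, topic), no Django models), so the ordering logic can be run in isolation:

# Minimal sketch of the selection heuristic above, on hypothetical toy data
# (plain dicts instead of Django querysets; keys are (stream_id, topic)).
from typing import Dict, Set, Tuple

conversation_length: Dict[Tuple[int, str], int] = {
    (1, "deploys"): 12, (1, "lunch"): 9, (2, "design"): 7, (2, "bugs"): 3,
}
conversation_diversity: Dict[Tuple[int, str], Set[str]] = {
    (1, "deploys"): {"Ana", "Bo"},
    (1, "lunch"): {"Ana", "Bo", "Cy", "Di"},
    (2, "design"): {"Ana", "Cy", "Di"},
    (2, "bugs"): {"Bo"},
}

diversity_list = sorted(conversation_diversity.items(),
                        key=lambda entry: len(entry[1]), reverse=True)
length_list = sorted(conversation_length.items(),
                     key=lambda entry: entry[1], reverse=True)

# Two most participant-diverse topics first, then pad from the longest ones,
# skipping topics already picked.
hot_conversations = [key for key, _ in diversity_list[:2]]
for key, _ in length_list:
    if key not in hot_conversations:
        hot_conversations.append(key)
    if len(hot_conversations) >= 4:
        break

print(hot_conversations)
# [(1, 'lunch'), (2, 'design'), (1, 'deploys'), (2, 'bugs')]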
Example #2
def handle_digest_email(user_profile_id: int, cutoff: float) -> None:
    user_profile = get_user_profile_by_id(user_profile_id)

    # We are disabling digest emails for soft deactivated users for the time being.
    # TODO: Find an elegant way to generate digest emails for these users.
    if user_profile.long_term_idle:
        return None

    # Convert from epoch seconds to a datetime object.
    cutoff_date = datetime.datetime.fromtimestamp(int(cutoff), tz=pytz.utc)

    all_messages = UserMessage.objects.filter(
        user_profile=user_profile,
        message__pub_date__gt=cutoff_date).order_by("message__pub_date")

    context = common_context(user_profile)

    # Start building email template data.
    context.update({
        'realm_name': user_profile.realm.name,
        'name': user_profile.full_name,
        'unsubscribe_link': one_click_unsubscribe_link(user_profile, "digest")
    })

    # Gather recent missed PMs, re-using the missed PM email logic.
    # You can't have an unread message that you sent, but when testing
    # this causes confusion, so filter out your own messages.
    pms = all_messages.filter(
        ~Q(message__recipient__type=Recipient.STREAM) &
        ~Q(message__sender=user_profile))

    # Show up to 4 missed PMs.
    pms_limit = 4

    context['unread_pms'] = build_message_list(
        user_profile, [pm.message for pm in pms[:pms_limit]])
    context['remaining_unread_pms_count'] = max(0, len(pms) - pms_limit)

    home_view_recipients = [sub.recipient for sub in
                            Subscription.objects.filter(
                                user_profile=user_profile,
                                active=True,
                                in_home_view=True)]

    stream_messages = all_messages.filter(
        message__recipient__type=Recipient.STREAM,
        message__recipient__in=home_view_recipients)

    # Gather hot conversations.
    context["hot_conversations"] = gather_hot_conversations(
        user_profile, stream_messages)

    # Gather new streams.
    new_streams_count, new_streams = gather_new_streams(
        user_profile, cutoff_date)
    context["new_streams"] = new_streams
    context["new_streams_count"] = new_streams_count

    # Gather users who signed up recently.
    new_users_count, new_users = gather_new_users(
        user_profile, cutoff_date)
    context["new_users"] = new_users

    # We don't want to send emails containing almost no information.
    if enough_traffic(context["unread_pms"], context["hot_conversations"],
                      new_streams_count, new_users_count):
        logger.info("Sending digest email for %s" % (user_profile.email,))
        # Send now, as a ScheduledEmail
        send_future_email('zerver/emails/digest', to_user_id=user_profile.id,
                          from_name="Zulip Digest", from_address=FromAddress.NOREPLY,
                          context=context)
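
The caller is expected to pass the cutoff as epoch seconds. A hypothetical invocation for a one-week digest window might look like the sketch below (the 7-day window and the user id are illustrative assumptions, not values from the source):

# Hypothetical invocation: digest everything from the last 7 days.
# The window length and the user id 42 are illustrative, not from the source.
import time

one_week_ago = time.time() - 7 * 24 * 60 * 60
handle_digest_email(user_profile_id=42, cutoff=one_week_ago)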
Example #3
def gather_hot_conversations(user_profile, stream_messages):
    # type: (UserProfile, QuerySet) -> List[Dict[str, Any]]
    # Gather stream conversations of 2 types:
    # 1. long conversations
    # 2. conversations where many different people participated
    #
    # Returns a list of dictionaries containing the templating
    # information for each hot conversation.

    conversation_length = defaultdict(int) # type: Dict[Tuple[int, text_type], int]
    conversation_diversity = defaultdict(set) # type: Dict[Tuple[int, text_type], Set[text_type]]
    for user_message in stream_messages:
        if not user_message.message.sent_by_human():
            # Don't include automated messages in the count.
            continue

        key = (user_message.message.recipient.type_id,
               user_message.message.subject)
        conversation_diversity[key].add(
            user_message.message.sender.full_name)
        conversation_length[key] += 1

    diversity_list = list(conversation_diversity.items())
    diversity_list.sort(key=lambda entry: len(entry[1]), reverse=True)

    length_list = list(conversation_length.items())
    length_list.sort(key=lambda entry: entry[1], reverse=True)

    # Get up to the 4 best conversations from the diversity list
    # and length list, filtering out overlapping conversations.
    hot_conversations = [elt[0] for elt in diversity_list[:2]]
    for candidate, _ in length_list:
        if candidate not in hot_conversations:
            hot_conversations.append(candidate)
        if len(hot_conversations) >= 4:
            break

    # There was so much overlap between the diversity and length lists that we
    # still have < 4 conversations. Try to use remaining diversity items to pad
    # out the hot conversations.
    num_convos = len(hot_conversations)
    if num_convos < 4:
        hot_conversations.extend([elt[0] for elt in diversity_list[num_convos:4]])

    hot_conversation_render_payloads = []
    for h in hot_conversations:
        stream_id, subject = h
        users = list(conversation_diversity[h])
        count = conversation_length[h]

        # We'll display up to 2 messages from the conversation.
        first_few_messages = [user_message.message for user_message in
                              stream_messages.filter(
                                  message__recipient__type_id=stream_id,
                                  message__subject=subject)[:2]]

        teaser_data = {"participants": users,
                       "count": count - len(first_few_messages),
                       "first_few_messages": build_message_list(
                user_profile, first_few_messages)}

        hot_conversation_render_payloads.append(teaser_data)
    return hot_conversation_render_payloads
Example #4
def handle_digest_email(user_profile_id, cutoff):
    # type: (int, int) -> None
    user_profile = UserProfile.objects.get(id=user_profile_id)
    # Convert from epoch seconds to a datetime object.
    cutoff_date = datetime.datetime.utcfromtimestamp(int(cutoff))

    all_messages = UserMessage.objects.filter(
        user_profile=user_profile,
        message__pub_date__gt=cutoff_date).order_by("message__pub_date")

    # Start building email template data.
    template_payload = {
        'name': user_profile.full_name,
        'external_host': settings.EXTERNAL_HOST,
        'external_uri_scheme': settings.EXTERNAL_URI_SCHEME,
        'server_uri': settings.SERVER_URI,
        'realm_uri': user_profile.realm.uri,
        'unsubscribe_link': one_click_unsubscribe_link(user_profile, "digest")
    }  # type: Dict[str, Any]

    # Gather recent missed PMs, re-using the missed PM email logic.
    # You can't have an unread message that you sent, but when testing
    # this causes confusion, so filter out your own messages.
    pms = all_messages.filter(
        ~Q(message__recipient__type=Recipient.STREAM) &
        ~Q(message__sender=user_profile))

    # Show up to 4 missed PMs.
    pms_limit = 4

    template_payload['unread_pms'] = build_message_list(
        user_profile, [pm.message for pm in pms[:pms_limit]])
    template_payload['remaining_unread_pms_count'] = max(0, len(pms) - pms_limit)

    home_view_recipients = [sub.recipient for sub in
                            Subscription.objects.filter(
                                user_profile=user_profile,
                                active=True,
                                in_home_view=True)]

    stream_messages = all_messages.filter(
        message__recipient__type=Recipient.STREAM,
        message__recipient__in=home_view_recipients)

    # Gather hot conversations.
    template_payload["hot_conversations"] = gather_hot_conversations(
        user_profile, stream_messages)

    # Gather new streams.
    new_streams_count, new_streams = gather_new_streams(
        user_profile, cutoff_date)
    template_payload["new_streams"] = new_streams
    template_payload["new_streams_count"] = new_streams_count

    # Gather users who signed up recently.
    new_users_count, new_users = gather_new_users(
        user_profile, cutoff_date)
    template_payload["new_users"] = new_users

    text_content = loader.render_to_string(
        'zerver/emails/digest/digest_email.txt', template_payload)
    html_content = loader.render_to_string(
        'zerver/emails/digest/digest_email_html.txt', template_payload)

    # We don't want to send emails containing almost no information.
    if enough_traffic(template_payload["unread_pms"],
                      template_payload["hot_conversations"],
                      new_streams_count, new_users_count):
        logger.info("Sending digest email for %s" % (user_profile.email,))
        send_digest_email(user_profile, html_content, text_content)
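
Note how this older example converts the epoch cutoff differently from Example #2: datetime.datetime.utcfromtimestamp() yields a naive datetime, while datetime.datetime.fromtimestamp(..., tz=pytz.utc) yields a timezone-aware one, which matters once Django's USE_TZ setting is enabled. A minimal comparison (the cutoff value is illustrative):

import datetime
import pytz

cutoff = 1500000000  # epoch seconds (illustrative value)

naive = datetime.datetime.utcfromtimestamp(cutoff)            # tzinfo is None
aware = datetime.datetime.fromtimestamp(cutoff, tz=pytz.utc)  # tzinfo is UTC

print(naive.tzinfo)                          # None
print(aware.tzinfo)                          # UTC
print(naive == aware.replace(tzinfo=None))   # True: same wall-clock instant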
Example #5
def handle_digest_email(user_profile_id: int, cutoff: float) -> None:
    user_profile = get_user_profile_by_id(user_profile_id)

    # We are disabling digest emails for soft deactivated users for the time being.
    # TODO: Find an elegant way to generate digest emails for these users.
    if user_profile.long_term_idle:
        return None

    # Convert from epoch seconds to a datetime object.
    cutoff_date = datetime.datetime.fromtimestamp(int(cutoff), tz=pytz.utc)

    all_messages = UserMessage.objects.filter(
        user_profile=user_profile,
        message__pub_date__gt=cutoff_date).order_by("message__pub_date")

    context = common_context(user_profile)

    # Start building email template data.
    context.update({
        'realm_name': user_profile.realm.name,
        'name': user_profile.full_name,
        'unsubscribe_link': one_click_unsubscribe_link(user_profile, "digest")
    })

    # Gather recent missed PMs, re-using the missed PM email logic.
    # You can't have an unread message that you sent, but when testing
    # this causes confusion, so filter out your own messages.
    pms = all_messages.filter(
        ~Q(message__recipient__type=Recipient.STREAM) &
        ~Q(message__sender=user_profile))

    # Show up to 4 missed PMs.
    pms_limit = 4

    context['unread_pms'] = build_message_list(
        user_profile, [pm.message for pm in pms[:pms_limit]])
    context['remaining_unread_pms_count'] = max(0, len(pms) - pms_limit)

    home_view_recipients = [sub.recipient for sub in
                            Subscription.objects.filter(
                                user_profile=user_profile,
                                active=True,
                                in_home_view=True)]

    stream_messages = all_messages.filter(
        message__recipient__type=Recipient.STREAM,
        message__recipient__in=home_view_recipients)

    # Gather hot conversations.
    context["hot_conversations"] = gather_hot_conversations(
        user_profile, stream_messages)

    # Gather new streams.
    new_streams_count, new_streams = gather_new_streams(
        user_profile, cutoff_date)
    context["new_streams"] = new_streams
    context["new_streams_count"] = new_streams_count

    # Gather users who signed up recently.
    new_users_count, new_users = gather_new_users(
        user_profile, cutoff_date)
    context["new_users"] = new_users

    # We don't want to send emails containing almost no information.
    if enough_traffic(context["unread_pms"], context["hot_conversations"],
                      new_streams_count, new_users_count):
        logger.info("Sending digest email for %s" % (user_profile.email,))
        # Send now, as a ScheduledEmail
        send_future_email('zerver/emails/digest', user_profile.realm, to_user_id=user_profile.id,
                          from_name="Zulip Digest", from_address=FromAddress.NOREPLY,
                          context=context)
Example #6
def build_digest(user_profile: UserProfile, cutoff: float) -> Optional[Dict[str, Any]]:
    # We are disabling digest emails for soft deactivated users for the time being.
    # TODO: Find an elegant way to generate digest emails for these users.
    if user_profile.long_term_idle:
        return None

    # Convert from epoch seconds to a datetime object.
    cutoff_date = datetime.datetime.fromtimestamp(int(cutoff), tz=pytz.utc)

    all_messages = UserMessage.objects.filter(
        user_profile=user_profile,
        message__pub_date__gt=cutoff_date).order_by("message__pub_date")

    context = common_context(user_profile)

    # Start building email template data.
    context.update({
        'realm_name': user_profile.realm.name,
        'name': user_profile.full_name,
        'unsubscribe_link': one_click_unsubscribe_link(user_profile, "digest")
    })

    # Gather recent missed PMs, re-using the missed PM email logic.
    # You can't have an unread message that you sent, but when testing
    # this causes confusion, so filter out your own messages.
    pms = all_messages.filter(
        ~Q(message__recipient__type=Recipient.STREAM) &
        ~Q(message__sender=user_profile))

    # Show up to 4 missed PMs.
    pms_limit = 4

    context['unread_pms'] = build_message_list(
        user_profile, [pm.message for pm in pms[:pms_limit]])
    context['remaining_unread_pms_count'] = max(0, len(pms) - pms_limit)

    home_view_recipients = [sub.recipient for sub in
                            Subscription.objects.filter(
                                user_profile=user_profile,
                                active=True,
                                in_home_view=True)]

    stream_messages = all_messages.filter(
        message__recipient__type=Recipient.STREAM,
        message__recipient__in=home_view_recipients)

    # Gather hot conversations.
    context["hot_conversations"] = gather_hot_conversations(
        user_profile, stream_messages)

    # Gather new streams.
    new_streams_count, new_streams = gather_new_streams(
        user_profile, cutoff_date)
    context["new_streams"] = new_streams
    context["new_streams_count"] = new_streams_count

    # Gather users who signed up recently.
    new_users_count, new_users = gather_new_users(
        user_profile, cutoff_date)
    context["new_users"] = new_users

    return context
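
Since this variant only builds and returns the template context instead of sending the email itself, a hypothetical caller would combine it with the traffic check and send_future_email call shown in Example #5. The sketch below is an assumption, not code from the source: the wrapper name mirrors Examples #2 and #5, and it assumes context["new_users"] is a list so its length can stand in for the new-user count.

# Hypothetical wrapper combining build_digest with the sending logic from
# Example #5. The wrapper name and the guard structure are assumptions.
def handle_digest_email(user_profile_id: int, cutoff: float) -> None:
    user_profile = get_user_profile_by_id(user_profile_id)
    context = build_digest(user_profile, cutoff)
    if context is None:
        return  # soft-deactivated user, no digest built

    # We don't want to send emails containing almost no information.
    # Assumption: context["new_users"] is a list, so len() gives the count.
    if enough_traffic(context["unread_pms"], context["hot_conversations"],
                      context["new_streams_count"], len(context["new_users"])):
        logger.info("Sending digest email for %s" % (user_profile.email,))
        send_future_email('zerver/emails/digest', user_profile.realm,
                          to_user_id=user_profile.id, from_name="Zulip Digest",
                          from_address=FromAddress.NOREPLY, context=context)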