Example 1
    def test_bulk_access_messages_public_stream(self) -> None:
        user = self.example_user("hamlet")
        self.login_user(user)

        # Test message accessibility, including a public stream message
        stream_name = "public_stream"
        stream = self.subscribe(user, stream_name)
        message_one_id = self.send_stream_message(user, stream_name, "Message one")

        later_subscribed_user = self.example_user("cordelia")
        self.subscribe(later_subscribed_user, stream_name)

        # Send a message after subscribing a new user to stream
        message_two_id = self.send_stream_message(user, stream_name, "Message two")

        message_ids = [message_one_id, message_two_id]
        messages = [
            Message.objects.select_related().get(id=message_id) for message_id in message_ids
        ]

        # All public stream messages are always accessible
        with queries_captured() as queries:
            filtered_messages = bulk_access_messages(later_subscribed_user, messages, stream=stream)
        self.assert_length(filtered_messages, 2)
        self.assert_length(queries, 2)

        unsubscribed_user = self.example_user("ZOE")
        with queries_captured() as queries:
            filtered_messages = bulk_access_messages(unsubscribed_user, messages, stream=stream)

        self.assert_length(filtered_messages, 2)
        self.assert_length(queries, 2)
Example 2
    def test_bulk_access_messages_private_stream(self) -> None:
        user = self.example_user("hamlet")
        self.login_user(user)

        stream_name = "private_stream"
        stream = self.make_stream(
            stream_name, invite_only=True, history_public_to_subscribers=False
        )

        self.subscribe(user, stream_name)
        # Send a message before subscribing a new user to stream
        message_one_id = self.send_stream_message(user, stream_name, "Message one")

        later_subscribed_user = self.example_user("cordelia")
        # Subscribe a user to the private stream with protected history
        self.subscribe(later_subscribed_user, stream_name)

        # Send a message after subscribing a new user to stream
        message_two_id = self.send_stream_message(user, stream_name, "Message two")

        message_ids = [message_one_id, message_two_id]
        messages = [
            Message.objects.select_related().get(id=message_id) for message_id in message_ids
        ]

        with queries_captured() as queries:
            filtered_messages = bulk_access_messages(later_subscribed_user, messages, stream=stream)
        self.assert_length(queries, 2)

        # The message sent before subscribing isn't accessible to the
        # later-subscribed user, since the stream has protected history
        self.assert_length(filtered_messages, 1)
        self.assertEqual(filtered_messages[0].id, message_two_id)

        do_change_stream_invite_only(stream, True, history_public_to_subscribers=True)

        with queries_captured() as queries:
            filtered_messages = bulk_access_messages(later_subscribed_user, messages, stream=stream)
        self.assert_length(queries, 2)

        # The message sent before subscribing is now accessible to the user,
        # since the stream no longer has protected history
        self.assert_length(filtered_messages, 2)

        # Test message accessibility for an unsubscribed user
        unsubscribed_user = self.example_user("ZOE")

        with queries_captured() as queries:
            filtered_messages = bulk_access_messages(unsubscribed_user, messages, stream=stream)
        self.assert_length(queries, 2)

        self.assert_length(filtered_messages, 0)

        # Verify an exception is thrown if the passed stream does not
        # match the messages.
        with self.assertRaises(AssertionError):
            bulk_access_messages(
                unsubscribed_user, messages, stream=get_stream("Denmark", unsubscribed_user.realm)
            )
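
Taken together, Examples 1 and 2 encode the access rules that bulk_access_messages enforces: messages in a public stream are accessible to everyone (including unsubscribed users), a private stream with protected history hides messages sent before the user subscribed, and unsubscribed users cannot access private-stream messages at all. Below is a minimal, self-contained sketch of that decision logic using plain-Python stand-ins; it is illustrative only and is not the real implementation in zerver/lib/message.py.

def message_is_accessible(
    invite_only: bool,
    history_public_to_subscribers: bool,
    user_is_subscribed: bool,
    user_received_message: bool,
) -> bool:
    # Public stream: all history is accessible, even to non-subscribers.
    if not invite_only:
        return True
    # Private stream, not subscribed: no access.
    if not user_is_subscribed:
        return False
    # Private stream with shared history: any subscriber can access.
    if history_public_to_subscribers:
        return True
    # Protected history: accessible only if the user received the message
    # (i.e. was subscribed when it was sent and has a UserMessage row).
    return user_received_message

# The later-subscribed user in Example 2 did not receive "Message one",
# so it is filtered out until the stream is switched to
# history_public_to_subscribers=True.
assert message_is_accessible(True, False, True, False) is False
assert message_is_accessible(True, True, True, False) is True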
Example 3
    def test_bulk_access_messages_private_stream(self) -> None:
        user = self.example_user("hamlet")
        self.login_user(user)

        stream_name = "private_stream"
        stream = self.make_stream(stream_name,
                                  invite_only=True,
                                  history_public_to_subscribers=False)

        self.subscribe(user, stream_name)
        # Send a message before subscribing a new user to stream
        message_one_id = self.send_stream_message(user, stream_name,
                                                  "Message one")

        later_subscribed_user = self.example_user("cordelia")
        # Subscribe a user to the private stream with protected history
        self.subscribe(later_subscribed_user, stream_name)

        # Send a message after subscribing a new user to stream
        message_two_id = self.send_stream_message(user, stream_name,
                                                  "Message two")

        message_ids = [message_one_id, message_two_id]
        messages = [
            Message.objects.select_related().get(id=message_id)
            for message_id in message_ids
        ]

        filtered_messages = bulk_access_messages(later_subscribed_user,
                                                 messages)

        # The message sent before subscribing isn't accessible to the
        # later-subscribed user, since the stream has protected history
        self.assertEqual(len(filtered_messages), 1)
        self.assertEqual(filtered_messages[0].id, message_two_id)

        do_change_stream_invite_only(stream,
                                     True,
                                     history_public_to_subscribers=True)

        filtered_messages = bulk_access_messages(later_subscribed_user,
                                                 messages)

        # The message sent before subscribing is now accessible to the user,
        # since the stream no longer has protected history
        self.assertEqual(len(filtered_messages), 2)

        # Test message accessibility for an unsubscribed user
        unsubscribed_user = self.example_user("ZOE")

        filtered_messages = bulk_access_messages(unsubscribed_user, messages)

        self.assertEqual(len(filtered_messages), 0)
Example 4
def handle_missedmessage_emails(user_profile_id: int,
                                missed_email_events: Iterable[Dict[str, Any]]) -> None:
    message_ids = {event.get('message_id'): event.get('trigger') for event in missed_email_events}

    user_profile = get_user_profile_by_id(user_profile_id)
    if not receives_offline_email_notifications(user_profile):
        return

    messages = Message.objects.filter(usermessage__user_profile_id=user_profile,
                                      id__in=message_ids,
                                      usermessage__flags=~UserMessage.flags.read)

    # Cancel missed-message emails for deleted messages
    messages = [um for um in messages if um.content != "(deleted)"]

    if not messages:
        return

    messages_by_recipient_subject = defaultdict(list)  # type: Dict[Tuple[int, str], List[Message]]
    for msg in messages:
        if msg.recipient.type == Recipient.PERSONAL:
            # For PMs, group by (recipient, sender).
            messages_by_recipient_subject[(msg.recipient_id, msg.sender_id)].append(msg)
        else:
            messages_by_recipient_subject[(msg.recipient_id, msg.topic_name())].append(msg)

    message_count_by_recipient_subject = {
        recipient_subject: len(msgs)
        for recipient_subject, msgs in messages_by_recipient_subject.items()
    }

    for msg_list in messages_by_recipient_subject.values():
        msg = min(msg_list, key=lambda msg: msg.pub_date)
        if msg.is_stream_message():
            context_messages = get_context_for_message(msg)
            filtered_context_messages = bulk_access_messages(user_profile, context_messages)
            msg_list.extend(filtered_context_messages)

    # Sort emails by least recently-active discussion.
    recipient_subjects = []  # type: List[Tuple[Tuple[int, str], int]]
    for recipient_subject, msg_list in messages_by_recipient_subject.items():
        max_message_id = max(msg_list, key=lambda msg: msg.id).id
        recipient_subjects.append((recipient_subject, max_message_id))

    recipient_subjects = sorted(recipient_subjects, key=lambda x: x[1])

    # Send an email per recipient-subject pair
    for recipient_subject, ignored_max_id in recipient_subjects:
        unique_messages = {}
        for m in messages_by_recipient_subject[recipient_subject]:
            unique_messages[m.id] = dict(
                message=m,
                trigger=message_ids.get(m.id)
            )
        do_send_missedmessage_events_reply_in_zulip(
            user_profile,
            list(unique_messages.values()),
            message_count_by_recipient_subject[recipient_subject],
        )
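
handle_missedmessage_emails groups unread messages so that one email is sent per conversation: stream messages are keyed by (recipient_id, topic) and PMs by (recipient_id, sender_id), and the groups are then ordered by their newest message id so the least recently active discussion is emailed first. A runnable, simplified illustration of that grouping and ordering, using hypothetical tuples in place of Message objects:

from collections import defaultdict
from typing import Dict, List, Tuple

# Hypothetical unread messages: (message_id, recipient_id, topic_or_sender).
# The real code uses the sender's id for PMs; a name is used here only for
# readability.
unread = [
    (101, 7, "checkins"),
    (102, 7, "checkins"),
    (90, 3, "cordelia"),   # a PM, grouped by (recipient, sender)
    (120, 7, "releases"),
]

buckets: Dict[Tuple[int, str], List[int]] = defaultdict(list)
for message_id, recipient_id, key in unread:
    buckets[(recipient_id, key)].append(message_id)

# One email per group, ordered by the newest message id in each group.
ordered = sorted(buckets.items(), key=lambda kv: max(kv[1]))
assert [group for group, _ in ordered] == [
    (3, "cordelia"),
    (7, "checkins"),
    (7, "releases"),
]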
Example 5
def update_messages_for_topic_edit(
    acting_user: UserProfile,
    edited_message: Message,
    propagate_mode: str,
    orig_topic_name: str,
    topic_name: Optional[str],
    new_stream: Optional[Stream],
    old_stream: Stream,
    edit_history_event: EditHistoryEvent,
    last_edit_time: datetime,
) -> List[Message]:
    propagate_query = Q(recipient_id=old_stream.recipient_id,
                        subject__iexact=orig_topic_name)
    if propagate_mode == "change_all":
        propagate_query = propagate_query & ~Q(id=edited_message.id)
    if propagate_mode == "change_later":
        propagate_query = propagate_query & Q(id__gt=edited_message.id)

    messages = Message.objects.filter(propagate_query).select_related()

    update_fields = ["edit_history", "last_edit_time"]

    if new_stream is not None:
        # If we're moving the messages between streams, only move
        # messages that the acting user can access, so that one cannot
        # gain access to messages through moving them.
        from zerver.lib.message import bulk_access_messages

        messages_list = bulk_access_messages(acting_user,
                                             messages,
                                             stream=old_stream)
    else:
        # For single-message edits or topic moves within a stream, we
        # allow moving history the user may not have access to, in order
        # to keep topics together.
        messages_list = list(messages)

    # The cached ORM objects are not changed by the upcoming
    # messages.update(), and the remote cache update (done by the
    # caller) requires the new value, so we manually update the
    # objects in addition to sending a bulk query to the database.
    if new_stream is not None:
        update_fields.append("recipient")
        for m in messages_list:
            m.recipient = new_stream.recipient
    if topic_name is not None:
        update_fields.append("subject")
        for m in messages_list:
            m.set_topic_name(topic_name)

    for message in messages_list:
        update_edit_history(message, last_edit_time, edit_history_event)

    Message.objects.bulk_update(messages_list, update_fields)

    return messages_list
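
Here propagate_mode controls which other messages in the original topic get swept into the edit: "change_all" matches every message in the topic except the one being edited (which the caller updates separately), while "change_later" matches only messages with a larger id. As Example 7 shows, the caller only invokes this helper for those two modes. A small, runnable stand-in for the Q-object filtering, operating on plain message ids rather than a Django queryset:

from typing import List

def propagated_message_ids(
    propagate_mode: str, topic_message_ids: List[int], edited_id: int
) -> List[int]:
    # Simplified sketch of the propagate_query above; the real query also
    # matches recipient_id and compares the topic case-insensitively.
    if propagate_mode == "change_all":
        return [m for m in topic_message_ids if m != edited_id]
    if propagate_mode == "change_later":
        return [m for m in topic_message_ids if m > edited_id]
    # "change_one" never reaches this helper; the caller only propagates
    # for the two modes above.
    return []

assert propagated_message_ids("change_all", [5, 6, 7, 8], 6) == [5, 7, 8]
assert propagated_message_ids("change_later", [5, 6, 7, 8], 6) == [7, 8]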
Example 6
def handle_missedmessage_emails(
        user_profile_id: int,
        missed_email_events: Iterable[Dict[str, Any]]) -> None:
    message_ids = {
        event.get("message_id"): event.get("trigger")
        for event in missed_email_events
    }

    user_profile = get_user_profile_by_id(user_profile_id)
    if not receives_offline_email_notifications(user_profile):
        return

    # Note: This query structure automatically filters out any
    # messages that were permanently deleted, since those would now be
    # in the ArchivedMessage table, not the Message table.
    messages = Message.objects.filter(
        usermessage__user_profile_id=user_profile,
        id__in=message_ids,
        usermessage__flags=~UserMessage.flags.read,
    )

    # Cancel missed-message emails for deleted messages
    messages = [um for um in messages if um.content != "(deleted)"]

    if not messages:
        return

    # We bucket messages by tuples that identify similar messages.
    # For streams it's recipient_id and topic.
    # For PMs it's recipient id and sender.
    messages_by_bucket: Dict[Tuple[int, str],
                             List[Message]] = defaultdict(list)
    for msg in messages:
        if msg.recipient.type == Recipient.PERSONAL:
            # For PMs, group by (recipient, sender).
            messages_by_bucket[(msg.recipient_id, msg.sender_id)].append(msg)
        else:
            messages_by_bucket[(msg.recipient_id,
                                msg.topic_name())].append(msg)

    message_count_by_bucket = {
        bucket_tup: len(msgs)
        for bucket_tup, msgs in messages_by_bucket.items()
    }

    for msg_list in messages_by_bucket.values():
        msg = min(msg_list, key=lambda msg: msg.date_sent)
        if msg.is_stream_message():
            context_messages = get_context_for_message(msg)
            filtered_context_messages = bulk_access_messages(
                user_profile, context_messages)
            msg_list.extend(filtered_context_messages)

    # Sort emails by least recently-active discussion.
    bucket_tups: List[Tuple[Tuple[int, str], int]] = []
    for bucket_tup, msg_list in messages_by_bucket.items():
        max_message_id = max(msg_list, key=lambda msg: msg.id).id
        bucket_tups.append((bucket_tup, max_message_id))

    bucket_tups = sorted(bucket_tups, key=lambda x: x[1])

    # Send an email per bucket.
    for bucket_tup, ignored_max_id in bucket_tups:
        unique_messages = {}
        for m in messages_by_bucket[bucket_tup]:
            unique_messages[m.id] = dict(
                message=m,
                trigger=message_ids.get(m.id),
            )
        do_send_missedmessage_events_reply_in_zulip(
            user_profile,
            list(unique_messages.values()),
            message_count_by_bucket[bucket_tup],
        )
Example 7
def do_update_message(
    user_profile: UserProfile,
    target_message: Message,
    new_stream: Optional[Stream],
    topic_name: Optional[str],
    propagate_mode: Optional[str],
    send_notification_to_old_thread: bool,
    send_notification_to_new_thread: bool,
    content: Optional[str],
    rendering_result: Optional[MessageRenderingResult],
    prior_mention_user_ids: Set[int],
    mention_data: Optional[MentionData] = None,
) -> int:
    """
    The main function for message editing.  A message edit event can
    modify:
    * the message's content (in which case the caller will have
      set both content and rendered_content),
    * the topic, in which case the caller will have set topic_name
    * or both message's content and the topic
    * or stream and/or topic, in which case the caller will have set
        new_stream and/or topic_name.

    With topic edits, propagate_mode determines whether other messages
    also have their topics edited.
    """
    timestamp = timezone_now()
    target_message.last_edit_time = timestamp

    event: Dict[str, Any] = {
        "type": "update_message",
        "user_id": user_profile.id,
        "edit_timestamp": datetime_to_timestamp(timestamp),
        "message_id": target_message.id,
        "rendering_only": False,
    }

    edit_history_event: EditHistoryEvent = {
        "user_id": user_profile.id,
        "timestamp": event["edit_timestamp"],
    }

    changed_messages = [target_message]

    realm = user_profile.realm

    stream_being_edited = None
    if target_message.is_stream_message():
        stream_id = target_message.recipient.type_id
        stream_being_edited = get_stream_by_id_in_realm(stream_id, realm)
        event["stream_name"] = stream_being_edited.name
        event["stream_id"] = stream_being_edited.id

    ums = UserMessage.objects.filter(message=target_message.id)

    if content is not None:
        assert rendering_result is not None

        # mention_data is required if there's a content edit.
        assert mention_data is not None

        # add data from group mentions to mentions_user_ids.
        for group_id in rendering_result.mentions_user_group_ids:
            members = mention_data.get_group_members(group_id)
            rendering_result.mentions_user_ids.update(members)

        update_user_message_flags(rendering_result, ums)

        # One could imagine checking realm.allow_edit_history here and
        # modifying the events based on that setting, but doing so
        # doesn't really make sense.  We need to send the edit event
        # to clients regardless, and a client already had access to
        # the original/pre-edit content of the message anyway.  That
        # setting must be enforced on the client side, and making a
        # change here simply complicates the logic for clients parsing
        # edit history events.
        event["orig_content"] = target_message.content
        event["orig_rendered_content"] = target_message.rendered_content
        edit_history_event["prev_content"] = target_message.content
        edit_history_event[
            "prev_rendered_content"] = target_message.rendered_content
        edit_history_event[
            "prev_rendered_content_version"] = target_message.rendered_content_version
        target_message.content = content
        target_message.rendered_content = rendering_result.rendered_content
        target_message.rendered_content_version = markdown_version
        event["content"] = content
        event["rendered_content"] = rendering_result.rendered_content
        event[
            "prev_rendered_content_version"] = target_message.rendered_content_version
        event["is_me_message"] = Message.is_status_message(
            content, rendering_result.rendered_content)

        # target_message.has_image and target_message.has_link will have been
        # already updated by Markdown rendering in the caller.
        target_message.has_attachment = check_attachment_reference_change(
            target_message, rendering_result)

        if target_message.is_stream_message():
            if topic_name is not None:
                new_topic_name = topic_name
            else:
                new_topic_name = target_message.topic_name()

            stream_topic: Optional[StreamTopicTarget] = StreamTopicTarget(
                stream_id=stream_id,
                topic_name=new_topic_name,
            )
        else:
            stream_topic = None

        info = get_recipient_info(
            realm_id=realm.id,
            recipient=target_message.recipient,
            sender_id=target_message.sender_id,
            stream_topic=stream_topic,
            possible_wildcard_mention=mention_data.message_has_wildcards(),
        )

        event["online_push_user_ids"] = list(info["online_push_user_ids"])
        event["pm_mention_push_disabled_user_ids"] = list(
            info["pm_mention_push_disabled_user_ids"])
        event["pm_mention_email_disabled_user_ids"] = list(
            info["pm_mention_email_disabled_user_ids"])
        event["stream_push_user_ids"] = list(info["stream_push_user_ids"])
        event["stream_email_user_ids"] = list(info["stream_email_user_ids"])
        event["muted_sender_user_ids"] = list(info["muted_sender_user_ids"])
        event["prior_mention_user_ids"] = list(prior_mention_user_ids)
        event["presence_idle_user_ids"] = filter_presence_idle_user_ids(
            info["active_user_ids"])
        event["all_bot_user_ids"] = list(info["all_bot_user_ids"])
        if rendering_result.mentions_wildcard:
            event["wildcard_mention_user_ids"] = list(
                info["wildcard_mention_user_ids"])
        else:
            event["wildcard_mention_user_ids"] = []

        do_update_mobile_push_notification(
            target_message,
            prior_mention_user_ids,
            rendering_result.mentions_user_ids,
            info["stream_push_user_ids"],
        )

    if topic_name is not None or new_stream is not None:
        assert propagate_mode is not None
        orig_topic_name = target_message.topic_name()
        event["propagate_mode"] = propagate_mode

    if new_stream is not None:
        assert content is None
        assert target_message.is_stream_message()
        assert stream_being_edited is not None

        edit_history_event["prev_stream"] = stream_being_edited.id
        edit_history_event["stream"] = new_stream.id
        event[ORIG_TOPIC] = orig_topic_name
        assert new_stream.recipient_id is not None
        target_message.recipient_id = new_stream.recipient_id

        event["new_stream_id"] = new_stream.id
        event["propagate_mode"] = propagate_mode

        # When messages are moved from one stream to another, some
        # users may lose access to those messages, including guest
        # users and users not subscribed to the new stream (if it is a
        # private stream).  For those users, their experience is as
        # though the messages were deleted, and we should send a
        # delete_message event to them instead.

        subs_to_old_stream = get_active_subscriptions_for_stream_id(
            stream_id,
            include_deactivated_users=True).select_related("user_profile")
        subs_to_new_stream = list(
            get_active_subscriptions_for_stream_id(
                new_stream.id,
                include_deactivated_users=True).select_related("user_profile"))

        old_stream_sub_ids = [
            user.user_profile_id for user in subs_to_old_stream
        ]
        new_stream_sub_ids = [
            user.user_profile_id for user in subs_to_new_stream
        ]

        # Get users who aren't subscribed to the new_stream.
        subs_losing_usermessages = [
            sub for sub in subs_to_old_stream
            if sub.user_profile_id not in new_stream_sub_ids
        ]
        # Users who can no longer access the message without some action
        # from administrators.
        subs_losing_access = [
            sub for sub in subs_losing_usermessages
            if sub.user_profile.is_guest or not new_stream.is_public()
        ]
        ums = ums.exclude(user_profile_id__in=[
            sub.user_profile_id for sub in subs_losing_usermessages
        ])

        subs_gaining_usermessages = []
        if not new_stream.is_history_public_to_subscribers():
            # For private streams with history not public to subscribers,
            # we find users who were not subscribed to the messages' old
            # stream and create new UserMessage rows for them so that they
            # can access these messages.
            subs_gaining_usermessages += [
                user_id for user_id in new_stream_sub_ids
                if user_id not in old_stream_sub_ids
            ]

    if topic_name is not None:
        topic_name = truncate_topic(topic_name)
        target_message.set_topic_name(topic_name)

        # These fields have legacy field names.
        event[ORIG_TOPIC] = orig_topic_name
        event[TOPIC_NAME] = topic_name
        event[TOPIC_LINKS] = topic_links(target_message.sender.realm_id,
                                         topic_name)
        edit_history_event["prev_topic"] = orig_topic_name
        edit_history_event["topic"] = topic_name

    update_edit_history(target_message, timestamp, edit_history_event)

    delete_event_notify_user_ids: List[int] = []
    if propagate_mode in ["change_later", "change_all"]:
        assert topic_name is not None or new_stream is not None
        assert stream_being_edited is not None

        # Other messages should only get topic/stream fields in their edit history.
        topic_only_edit_history_event: EditHistoryEvent = {
            "user_id": edit_history_event["user_id"],
            "timestamp": edit_history_event["timestamp"],
        }
        if topic_name is not None:
            topic_only_edit_history_event["prev_topic"] = edit_history_event[
                "prev_topic"]
            topic_only_edit_history_event["topic"] = edit_history_event[
                "topic"]
        if new_stream is not None:
            topic_only_edit_history_event["prev_stream"] = edit_history_event[
                "prev_stream"]
            topic_only_edit_history_event["stream"] = edit_history_event[
                "stream"]

        messages_list = update_messages_for_topic_edit(
            acting_user=user_profile,
            edited_message=target_message,
            propagate_mode=propagate_mode,
            orig_topic_name=orig_topic_name,
            topic_name=topic_name,
            new_stream=new_stream,
            old_stream=stream_being_edited,
            edit_history_event=topic_only_edit_history_event,
            last_edit_time=timestamp,
        )
        changed_messages += messages_list

        if new_stream is not None:
            assert stream_being_edited is not None
            changed_message_ids = [msg.id for msg in changed_messages]

            if subs_gaining_usermessages:
                ums_to_create = []
                for message_id in changed_message_ids:
                    for user_profile_id in subs_gaining_usermessages:
                        # The fact that the user didn't have a UserMessage
                        # originally means we can infer that the user was not
                        # mentioned in the original message (even if mention
                        # syntax was present, it would not take effect for a
                        # user who was not subscribed). If we were editing the
                        # message's content, we would rerender the message and
                        # then use the new stream's data to determine whether
                        # this is a mention of a subscriber; but as we are not
                        # doing so, we choose to preserve the "was this mention
                        # syntax an actual mention" decision made during the
                        # original rendering for implementation simplicity. As
                        # a result, the only flag to consider applying here is
                        # read.
                        um = UserMessageLite(
                            user_profile_id=user_profile_id,
                            message_id=message_id,
                            flags=UserMessage.flags.read,
                        )
                        ums_to_create.append(um)
                bulk_insert_ums(ums_to_create)

            # Delete UserMessage objects for users who will no
            # longer have access to these messages.  Note: This could be
            # very expensive, since it's N guest users x M messages.
            UserMessage.objects.filter(
                user_profile_id__in=[
                    sub.user_profile_id for sub in subs_losing_usermessages
                ],
                message_id__in=changed_message_ids,
            ).delete()

            delete_event: DeleteMessagesEvent = {
                "type": "delete_message",
                "message_ids": changed_message_ids,
                "message_type": "stream",
                "stream_id": stream_being_edited.id,
                "topic": orig_topic_name,
            }
            delete_event_notify_user_ids = [
                sub.user_profile_id for sub in subs_losing_access
            ]
            send_event(user_profile.realm, delete_event,
                       delete_event_notify_user_ids)

            # Reset the Attachment.is_*_public caches for all messages
            # moved to another stream with different access permissions.
            if new_stream.invite_only != stream_being_edited.invite_only:
                Attachment.objects.filter(
                    messages__in=changed_message_ids).update(
                        is_realm_public=None, )
                ArchivedAttachment.objects.filter(
                    messages__in=changed_message_ids).update(
                        is_realm_public=None, )

            if new_stream.is_web_public != stream_being_edited.is_web_public:
                Attachment.objects.filter(
                    messages__in=changed_message_ids).update(
                        is_web_public=None, )
                ArchivedAttachment.objects.filter(
                    messages__in=changed_message_ids).update(
                        is_web_public=None, )

    # This does message.save(update_fields=[...])
    save_message_for_edit_use_case(message=target_message)

    realm_id: Optional[int] = None
    if stream_being_edited is not None:
        realm_id = stream_being_edited.realm_id

    event["message_ids"] = update_to_dict_cache(changed_messages, realm_id)

    def user_info(um: UserMessage) -> Dict[str, Any]:
        return {
            "id": um.user_profile_id,
            "flags": um.flags_list(),
        }

    # The following block arranges that users who are subscribed to a
    # stream and can see history from before they subscribed get
    # live updates when old messages are edited (e.g. if the user does
    # a topic edit themselves).
    #
    # We still don't send an update event to users who are not
    # subscribed to this stream and don't have a UserMessage row. This
    # means if a non-subscriber is viewing the narrow, they won't get
    # real-time updates. This is a balance between sending
    # message-edit notifications for every public stream to every user
    # in the organization (too expensive, and also not what we do for
    # newly sent messages anyway) and having magical live-updates
    # where possible.
    users_to_be_notified = list(map(user_info, ums))
    if stream_being_edited is not None:
        if stream_being_edited.is_history_public_to_subscribers():
            subscriptions = get_active_subscriptions_for_stream_id(
                stream_id, include_deactivated_users=False)
            # We exclude long-term idle users, since they by
            # definition have no active clients.
            subscriptions = subscriptions.exclude(
                user_profile__long_term_idle=True)
            # Remove duplicates by excluding the ids of users already
            # in the users_to_be_notified list.  This is the case where a
            # user both has a UserMessage row and is a current
            # subscriber.
            subscriptions = subscriptions.exclude(
                user_profile_id__in=[um.user_profile_id for um in ums])

            if new_stream is not None:
                assert delete_event_notify_user_ids is not None
                subscriptions = subscriptions.exclude(
                    user_profile_id__in=delete_event_notify_user_ids)

            # All users that are subscribed to the stream must be
            # notified when a message is edited.
            subscriber_ids = set(
                subscriptions.values_list("user_profile_id", flat=True))

            if new_stream is not None:
                # TODO: Guest users don't see the new moved topic
                # unless the breadcrumb message for the new stream is
                # enabled. Excluding these users from receiving this
                # event helps us avoid an error traceback for our
                # clients. We should figure out a way to inform the
                # guest users of this new topic if sending a 'message'
                # event for these messages is not an option.
                #
                # Don't send this event to guest subs who are not
                # subscribers of the old stream but are subscribed to
                # the new stream; clients will be confused.
                old_stream_unsubbed_guests = [
                    sub for sub in subs_to_new_stream
                    if sub.user_profile.is_guest
                    and sub.user_profile_id not in subscriber_ids
                ]
                subscriptions = subscriptions.exclude(user_profile_id__in=[
                    sub.user_profile_id for sub in old_stream_unsubbed_guests
                ])
                subscriber_ids = set(
                    subscriptions.values_list("user_profile_id", flat=True))

            users_to_be_notified += list(
                map(subscriber_info, sorted(list(subscriber_ids))))

    # UserTopic updates and the content of notifications depend on
    # whether we've moved the entire topic, or just part of it. We
    # make that determination here.
    moved_all_visible_messages = False
    if topic_name is not None or new_stream is not None:
        assert stream_being_edited is not None

        if propagate_mode == "change_all":
            moved_all_visible_messages = True
        else:
            # With other propagate modes, if the user in fact moved
            # all messages in the stream, we want to explain it was a
            # full-topic move.
            #
            # For security model reasons, we don't want to allow a
            # user to take any action that would leak information
            # about older messages they cannot access (E.g. the only
            # remaining messages are in a stream without shared
            # history). The bulk_access_messages call below addresses
            # that concern.
            #
            # bulk_access_messages is inefficient for this task, since
            # we just want to do the exists() version of this
            # query. But it's nice to reuse code, and this bulk
            # operation is likely cheaper than a `GET /messages`
            # unless the topic has thousands of messages of history.
            assert stream_being_edited.recipient_id is not None
            unmoved_messages = messages_for_topic(
                stream_being_edited.recipient_id,
                orig_topic_name,
            )
            visible_unmoved_messages = bulk_access_messages(
                user_profile, unmoved_messages, stream=stream_being_edited)
            moved_all_visible_messages = len(visible_unmoved_messages) == 0

    # Migrate muted topic configuration in the following circumstances:
    #
    # * If propagate_mode is change_all, do so unconditionally.
    #
    # * If propagate_mode is change_later or change_one, do so when
    #   the acting user has moved the entire topic (as visible to them).
    #
    # This rule corresponds to checking moved_all_visible_messages.
    #
    # We may want more complex behavior in cases where one appears to
    # be merging topics (E.g. there are existing messages in the
    # target topic).
    if moved_all_visible_messages:
        assert stream_being_edited is not None
        assert topic_name is not None or new_stream is not None

        for muting_user in get_users_muting_topic(stream_being_edited.id,
                                                  orig_topic_name):
            # TODO: Ideally, this would be a bulk update operation,
            # because we are doing database operations in a loop here.
            #
            # This loop is only acceptable in production because it is
            # rare for more than a few users to have muted an
            # individual topic that is being moved; as of this
            # writing, no individual topic in Zulip Cloud had been
            # muted by more than 100 users.

            if new_stream is not None and muting_user.id in delete_event_notify_user_ids:
                # If the messages are being moved to a stream the user
                # cannot access, then we treat this as the
                # messages/topic being deleted for this user. This is
                # important for security reasons; we don't want to
                # give users a UserTopic row in a stream they cannot
                # access.  Unmute the topic for such users.
                do_unmute_topic(muting_user, stream_being_edited,
                                orig_topic_name)
            else:
                # Otherwise, we move the muted topic record for the user.
                # We call remove_topic_mute rather than do_unmute_topic to
                # avoid sending two events with new muted topics in
                # immediate succession; this is correct only because
                # muted_topics events always send the full set of topics.
                remove_topic_mute(muting_user, stream_being_edited.id,
                                  orig_topic_name)
                do_mute_topic(
                    muting_user,
                    new_stream
                    if new_stream is not None else stream_being_edited,
                    topic_name if topic_name is not None else orig_topic_name,
                    ignore_duplicate=True,
                )

    send_event(user_profile.realm, event, users_to_be_notified)

    if len(
            changed_messages
    ) > 0 and new_stream is not None and stream_being_edited is not None:
        # Notify users that the topic was moved.
        changed_messages_count = len(changed_messages)

        old_thread_notification_string = None
        if send_notification_to_old_thread:
            if moved_all_visible_messages:
                old_thread_notification_string = gettext_lazy(
                    "This topic was moved to {new_location} by {user}.")
            elif changed_messages_count == 1:
                old_thread_notification_string = gettext_lazy(
                    "A message was moved from this topic to {new_location} by {user}."
                )
            else:
                old_thread_notification_string = gettext_lazy(
                    "{changed_messages_count} messages were moved from this topic to {new_location} by {user}."
                )

        new_thread_notification_string = None
        if send_notification_to_new_thread:
            if moved_all_visible_messages:
                new_thread_notification_string = gettext_lazy(
                    "This topic was moved here from {old_location} by {user}.")
            elif changed_messages_count == 1:
                new_thread_notification_string = gettext_lazy(
                    "A message was moved here from {old_location} by {user}.")
            else:
                new_thread_notification_string = gettext_lazy(
                    "{changed_messages_count} messages were moved here from {old_location} by {user}."
                )

        send_message_moved_breadcrumbs(
            user_profile,
            stream_being_edited,
            orig_topic_name,
            old_thread_notification_string,
            new_stream,
            topic_name,
            new_thread_notification_string,
            changed_messages_count,
        )

    if (topic_name is not None and new_stream is None and content is None
            and len(changed_messages) > 0):
        assert stream_being_edited is not None
        maybe_send_resolve_topic_notifications(
            user_profile=user_profile,
            stream=stream_being_edited,
            old_topic=orig_topic_name,
            new_topic=topic_name,
            changed_messages=changed_messages,
        )

    return len(changed_messages)
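
When messages are moved between streams, do_update_message compares the two subscriber sets to work out who loses UserMessage rows (old-stream subscribers who are not subscribed to the new stream) and, when the new stream's history is not public to subscribers, who gains them (new-stream subscribers who were not subscribed to the old stream). Reduced to a runnable sketch with hypothetical user ids:

# Hypothetical subscriber id sets for the old and new streams.
old_stream_sub_ids = {1, 2, 3, 4}
new_stream_sub_ids = {3, 4, 5}

# Old-stream subscribers not in the new stream lose their UserMessage rows
# for the moved messages (and, if they are guests or the new stream is
# private, lose access entirely and receive a delete_message event).
subs_losing_usermessages = old_stream_sub_ids - new_stream_sub_ids
assert subs_losing_usermessages == {1, 2}

# If the new stream has protected history, users subscribed only to the new
# stream gain UserMessage rows so they can access the moved messages.
subs_gaining_usermessages = new_stream_sub_ids - old_stream_sub_ids
assert subs_gaining_usermessages == {5}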
Example 8
def handle_missedmessage_emails(
        user_profile_id: int,
        missed_email_events: Iterable[Dict[str, Any]]) -> None:
    message_ids = {
        event.get("message_id"): {
            "trigger": event.get("trigger"),
            "mentioned_user_group_id": event.get("mentioned_user_group_id"),
        }
        for event in missed_email_events
    }

    user_profile = get_user_profile_by_id(user_profile_id)
    if user_profile.is_bot:  # nocoverage
        # We don't expect to reach here for bot users. However, this code exists
        # to find and throw away any pre-existing events in the queue while
        # upgrading from versions before our notifiability logic was implemented.
        # TODO/compatibility: This block can be removed when one can no longer
        # upgrade from versions <= 4.0 to versions >= 5.0
        logger.warning("Send-email event found for bot user %s. Skipping.",
                       user_profile_id)
        return

    if not user_profile.enable_offline_email_notifications:
        # BUG: Investigate why it's possible to get here.
        return  # nocoverage

    # Note: This query structure automatically filters out any
    # messages that were permanently deleted, since those would now be
    # in the ArchivedMessage table, not the Message table.
    messages = Message.objects.filter(
        usermessage__user_profile_id=user_profile,
        id__in=message_ids,
        usermessage__flags=~UserMessage.flags.read,
    )

    # Cancel missed-message emails for deleted messages
    messages = [um for um in messages if um.content != "(deleted)"]

    if not messages:
        return

    # We bucket messages by tuples that identify similar messages.
    # For streams it's recipient_id and topic.
    # For PMs it's recipient id and sender.
    messages_by_bucket: Dict[Tuple[int, str],
                             List[Message]] = defaultdict(list)
    for msg in messages:
        if msg.recipient.type == Recipient.PERSONAL:
            # For PMs, group by (recipient, sender).
            messages_by_bucket[(msg.recipient_id, msg.sender_id)].append(msg)
        else:
            messages_by_bucket[(msg.recipient_id,
                                msg.topic_name())].append(msg)

    message_count_by_bucket = {
        bucket_tup: len(msgs)
        for bucket_tup, msgs in messages_by_bucket.items()
    }

    for msg_list in messages_by_bucket.values():
        msg = min(msg_list, key=lambda msg: msg.date_sent)
        if msg.is_stream_message():
            context_messages = get_context_for_message(msg)
            filtered_context_messages = bulk_access_messages(
                user_profile, context_messages)
            msg_list.extend(filtered_context_messages)

    # Sort emails by least recently-active discussion.
    bucket_tups: List[Tuple[Tuple[int, str], int]] = []
    for bucket_tup, msg_list in messages_by_bucket.items():
        max_message_id = max(msg_list, key=lambda msg: msg.id).id
        bucket_tups.append((bucket_tup, max_message_id))

    bucket_tups = sorted(bucket_tups, key=lambda x: x[1])

    # Send an email per bucket.
    for bucket_tup, ignored_max_id in bucket_tups:
        unique_messages = {}
        for m in messages_by_bucket[bucket_tup]:
            message_info = message_ids.get(m.id)
            unique_messages[m.id] = dict(
                message=m,
                trigger=message_info["trigger"] if message_info else None,
                mentioned_user_group_id=message_info.get(
                    "mentioned_user_group_id")
                if message_info is not None else None,
            )
        do_send_missedmessage_events_reply_in_zulip(
            user_profile,
            list(unique_messages.values()),
            message_count_by_bucket[bucket_tup],
        )
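
Within each bucket, this version deduplicates messages by id before sending, so a message that both triggered a notification and was pulled back in as context appears only once; messages fetched purely as context have no entry in message_ids and therefore get trigger=None. A runnable sketch with hypothetical data:

# Hypothetical trigger info keyed by message id, as built from the queue events.
message_ids = {101: {"trigger": "mentioned", "mentioned_user_group_id": None}}

# The bucket holds the triggering message plus context messages, one of
# which duplicates the triggering message.
bucket = [
    {"id": 100, "content": "earlier context"},
    {"id": 101, "content": "the mention"},
    {"id": 101, "content": "the mention"},  # duplicate from the context fetch
]

unique_messages = {}
for m in bucket:
    message_info = message_ids.get(m["id"])
    unique_messages[m["id"]] = dict(
        message=m,
        trigger=message_info["trigger"] if message_info else None,
        mentioned_user_group_id=message_info.get("mentioned_user_group_id")
        if message_info is not None
        else None,
    )

assert len(unique_messages) == 2
assert unique_messages[100]["trigger"] is None
assert unique_messages[101]["trigger"] == "mentioned"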
Example 9
def handle_missedmessage_emails(user_profile_id: int,
                                missed_email_events: Iterable[Dict[str, Any]]) -> None:
    message_ids = {event.get('message_id'): event.get('trigger') for event in missed_email_events}

    user_profile = get_user_profile_by_id(user_profile_id)
    if not receives_offline_email_notifications(user_profile):
        return

    # Note: This query structure automatically filters out any
    # messages that were permanently deleted, since those would now be
    # in the ArchivedMessage table, not the Message table.
    messages = Message.objects.filter(usermessage__user_profile_id=user_profile,
                                      id__in=message_ids,
                                      usermessage__flags=~UserMessage.flags.read)

    # Cancel missed-message emails for deleted messages
    messages = [um for um in messages if um.content != "(deleted)"]

    if not messages:
        return

    # We bucket messages by tuples that identify similar messages.
    # For streams it's recipient_id and topic.
    # For PMs it's recipient id and sender.
    messages_by_bucket = defaultdict(list)  # type: Dict[Tuple[int, str], List[Message]]
    for msg in messages:
        if msg.recipient.type == Recipient.PERSONAL:
            # For PMs, group by (recipient, sender).
            messages_by_bucket[(msg.recipient_id, msg.sender_id)].append(msg)
        else:
            messages_by_bucket[(msg.recipient_id, msg.topic_name())].append(msg)

    message_count_by_bucket = {
        bucket_tup: len(msgs)
        for bucket_tup, msgs in messages_by_bucket.items()
    }

    for msg_list in messages_by_bucket.values():
        msg = min(msg_list, key=lambda msg: msg.pub_date)
        if msg.is_stream_message():
            context_messages = get_context_for_message(msg)
            filtered_context_messages = bulk_access_messages(user_profile, context_messages)
            msg_list.extend(filtered_context_messages)

    # Sort emails by least recently-active discussion.
    bucket_tups = []  # type: List[Tuple[Tuple[int, str], int]]
    for bucket_tup, msg_list in messages_by_bucket.items():
        max_message_id = max(msg_list, key=lambda msg: msg.id).id
        bucket_tups.append((bucket_tup, max_message_id))

    bucket_tups = sorted(bucket_tups, key=lambda x: x[1])

    # Send an email per bucket.
    for bucket_tup, ignored_max_id in bucket_tups:
        unique_messages = {}
        for m in messages_by_bucket[bucket_tup]:
            unique_messages[m.id] = dict(
                message=m,
                trigger=message_ids.get(m.id)
            )
        do_send_missedmessage_events_reply_in_zulip(
            user_profile,
            list(unique_messages.values()),
            message_count_by_bucket[bucket_tup],
        )