Example #1
    def test_get_last_message_id(self) -> None:
        # get_last_message_id is a helper mainly used for RealmAuditLog
        self.assertEqual(
            get_last_message_id(),
            Message.objects.latest("id").id,
        )

        Message.objects.all().delete()

        self.assertEqual(get_last_message_id(), -1)
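
The test above pins down the helper's contract: it returns the id of the newest message, or -1 when no messages exist. A minimal sketch consistent with that contract, reusing the Message model from the example (the real Zulip helper may be implemented differently):

from django.db.models import Max

def get_last_message_id() -> int:
    # System-wide max message id; this is not filtered per realm.
    last_id = Message.objects.aggregate(Max("id"))["id__max"]
    if last_id is None:
        # An empty Message table makes the aggregate return None;
        # -1 serves as an integer stand-in for "beginning of time".
        last_id = -1
    return last_id
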
Example #2
def bulk_add_subs_to_db_with_logging(
    realm: Realm,
    acting_user: Optional[UserProfile],
    subs_to_add: List[SubInfo],
    subs_to_activate: List[SubInfo],
) -> None:

    Subscription.objects.bulk_create(info.sub for info in subs_to_add)
    sub_ids = [info.sub.id for info in subs_to_activate]
    Subscription.objects.filter(id__in=sub_ids).update(active=True)

    # Log subscription activities in RealmAuditLog
    event_time = timezone_now()
    event_last_message_id = get_last_message_id()

    all_subscription_logs: List[RealmAuditLog] = []
    for sub_info in subs_to_add:
        all_subscription_logs.append(
            RealmAuditLog(
                realm=realm,
                acting_user=acting_user,
                modified_user=sub_info.user,
                modified_stream=sub_info.stream,
                event_last_message_id=event_last_message_id,
                event_type=RealmAuditLog.SUBSCRIPTION_CREATED,
                event_time=event_time,
            )
        )
    for sub_info in subs_to_activate:
        all_subscription_logs.append(
            RealmAuditLog(
                realm=realm,
                acting_user=acting_user,
                modified_user=sub_info.user,
                modified_stream=sub_info.stream,
                event_last_message_id=event_last_message_id,
                event_type=RealmAuditLog.SUBSCRIPTION_ACTIVATED,
                event_time=event_time,
            )
        )
    # Now that we have all the log objects generated, we can do a bulk insert
    RealmAuditLog.objects.bulk_create(all_subscription_logs)
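
Both loops above read the same three fields off each SubInfo, so its shape is recoverable from usage alone. A minimal sketch of the container, assuming the field names simply mirror the attribute accesses (the actual Zulip dataclass may carry more):

from dataclasses import dataclass

@dataclass
class SubInfo:
    user: UserProfile
    sub: Subscription
    stream: Stream

Note also that event_time and event_last_message_id are captured once, before the loops, so every audit row in the batch shares the same timestamp and message-id snapshot.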
Example #3
File: digest.py Project: priyank-p/zulip
def bulk_write_realm_audit_logs(users: List[UserProfile]) -> None:
    if not users:
        return

    # We write RealmAuditLog rows for auditing, and we will also
    # use these rows during the next run to possibly exclude the
    # users (if insufficient time has passed).
    last_message_id = get_last_message_id()
    now = timezone_now()

    log_rows = [
        RealmAuditLog(
            realm_id=user.realm_id,
            modified_user_id=user.id,
            event_last_message_id=last_message_id,
            event_time=now,
            event_type=RealmAuditLog.USER_DIGEST_EMAIL_CREATED,
        ) for user in users
    ]

    RealmAuditLog.objects.bulk_create(log_rows)
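
The comment in this example promises that these rows are consulted on the next run to skip users who were digested too recently. A hypothetical sketch of that exclusion query (the name recently_digested_user_ids and the cutoff parameter are illustrative, not Zulip's actual API):

import datetime
from typing import List, Set

def recently_digested_user_ids(
    user_ids: List[int], cutoff: datetime.datetime
) -> Set[int]:
    # Users with a USER_DIGEST_EMAIL_CREATED row newer than the
    # cutoff already received a digest and should be excluded.
    rows = RealmAuditLog.objects.filter(
        modified_user_id__in=user_ids,
        event_type=RealmAuditLog.USER_DIGEST_EMAIL_CREATED,
        event_time__gt=cutoff,
    ).values_list("modified_user_id", flat=True)
    return set(rows)
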
Example #4
    def test_soft_deactivated_user_multiple_stream_senders(self) -> None:
        one_day_ago = timezone_now() - datetime.timedelta(days=1)
        Message.objects.all().update(date_sent=one_day_ago)

        digest_users = [
            self.example_user('othello'),
            self.example_user('aaron'),
            self.example_user('desdemona'),
            self.example_user('polonius'),
        ]
        digest_users.sort(key=lambda user: user.id)

        for digest_user in digest_users:
            for stream in ['Verona', 'Scotland', 'Denmark']:
                self.subscribe(digest_user, stream)

        RealmAuditLog.objects.all().delete()

        # Send messages to a stream, then have each digest user
        # unsubscribe from and resubscribe to that stream
        senders = ['hamlet', 'cordelia', 'iago', 'prospero', 'ZOE']
        self.simulate_stream_conversation('Verona', senders)

        for digest_user in digest_users:
            self.unsubscribe(digest_user, 'Verona')
            self.subscribe(digest_user, 'Verona')

        # Send messages to other streams
        self.simulate_stream_conversation('Scotland', senders)
        self.simulate_stream_conversation('Denmark', senders)

        one_hour_ago = timezone_now() - datetime.timedelta(seconds=3600)
        cutoff = time.mktime(one_hour_ago.timetuple())

        flush_per_request_caches()

        # When this test is run in isolation, one additional query is run which
        # is equivalent to
        # ContentType.objects.get(app_label='zerver', model='userprofile')
        # This code is run when we call `confirmation.models.create_confirmation_link`.
        # To trigger this, we call the one_click_unsubscribe_link function below.
        one_click_unsubscribe_link(digest_users[0], 'digest')

        with mock.patch('zerver.lib.digest.send_future_email') as mock_send_future_email:
            digest_user_ids = [user.id for user in digest_users]

            with queries_captured() as queries:
                with cache_tries_captured() as cache_tries:
                    bulk_handle_digest_email(digest_user_ids, cutoff)

            self.assert_length(queries, 12)
            self.assert_length(cache_tries, 0)

        self.assertEqual(mock_send_future_email.call_count, len(digest_users))

        for i, digest_user in enumerate(digest_users):
            kwargs = mock_send_future_email.call_args_list[i][1]
            self.assertEqual(kwargs['to_user_ids'], [digest_user.id])

            hot_conversations = kwargs['context']['hot_conversations']
            self.assertEqual(2, len(hot_conversations), [digest_user.id])

            hot_convo = hot_conversations[0]
            expected_participants = {
                self.example_user(sender).full_name
                for sender in senders
            }

            self.assertEqual(set(hot_convo['participants']),
                             expected_participants)
            self.assertEqual(hot_convo['count'],
                             5 - 2)  # 5 messages, but 2 shown
            teaser_messages = hot_convo['first_few_messages'][0]['senders']
            self.assertIn('some content',
                          teaser_messages[0]['content'][0]['plain'])
            self.assertIn(teaser_messages[0]['sender'], expected_participants)

        last_message_id = get_last_message_id()
        for digest_user in digest_users:
            log_rows = RealmAuditLog.objects.filter(
                modified_user_id=digest_user.id,
                event_type=RealmAuditLog.USER_DIGEST_EMAIL_CREATED,
            )
            (log,) = log_rows
            self.assertEqual(log.event_last_message_id, last_message_id)
Example #5
def bulk_remove_subscriptions(
    realm: Realm,
    users: Iterable[UserProfile],
    streams: Iterable[Stream],
    *,
    acting_user: Optional[UserProfile],
) -> SubAndRemovedT:

    users = list(users)
    streams = list(streams)

    # Sanity check our callers
    for stream in streams:
        assert stream.realm_id == realm.id

    for user in users:
        assert user.realm_id == realm.id

    stream_dict = {stream.id: stream for stream in streams}

    existing_subs_by_user = get_bulk_stream_subscriber_info(users, streams)

    def get_non_subscribed_subs() -> List[Tuple[UserProfile, Stream]]:
        stream_ids = {stream.id for stream in streams}

        not_subscribed: List[Tuple[UserProfile, Stream]] = []

        for user_profile in users:
            user_sub_stream_info = existing_subs_by_user[user_profile.id]

            subscribed_stream_ids = {sub_info.stream.id for sub_info in user_sub_stream_info}
            not_subscribed_stream_ids = stream_ids - subscribed_stream_ids

            for stream_id in not_subscribed_stream_ids:
                stream = stream_dict[stream_id]
                not_subscribed.append((user_profile, stream))

        return not_subscribed

    not_subscribed = get_non_subscribed_subs()

    subs_to_deactivate: List[SubInfo] = []
    sub_ids_to_deactivate: List[int] = []

    # This loop just flattens out our data into big lists for
    # bulk operations.
    for sub_infos in existing_subs_by_user.values():
        for sub_info in sub_infos:
            subs_to_deactivate.append(sub_info)
            sub_ids_to_deactivate.append(sub_info.sub.id)

    # We do all the database changes in a transaction to ensure
    # RealmAuditLog entries are atomically created when making changes.
    with transaction.atomic():
        occupied_streams_before = list(get_occupied_streams(realm))
        Subscription.objects.filter(
            id__in=sub_ids_to_deactivate,
        ).update(active=False)
        occupied_streams_after = list(get_occupied_streams(realm))

        # Log subscription activities in RealmAuditLog
        event_time = timezone_now()
        event_last_message_id = get_last_message_id()
        all_subscription_logs = [
            RealmAuditLog(
                realm=sub_info.user.realm,
                acting_user=acting_user,
                modified_user=sub_info.user,
                modified_stream=sub_info.stream,
                event_last_message_id=event_last_message_id,
                event_type=RealmAuditLog.SUBSCRIPTION_DEACTIVATED,
                event_time=event_time,
            )
            for sub_info in subs_to_deactivate
        ]

        # Now that we have all the log objects generated, we can do a bulk insert
        RealmAuditLog.objects.bulk_create(all_subscription_logs)

    altered_user_dict: Dict[int, Set[int]] = defaultdict(set)
    streams_by_user: Dict[int, List[Stream]] = defaultdict(list)
    for sub_info in subs_to_deactivate:
        stream = sub_info.stream
        streams_by_user[sub_info.user.id].append(stream)
        altered_user_dict[stream.id].add(sub_info.user.id)

    for user_profile in users:
        if len(streams_by_user[user_profile.id]) == 0:
            continue
        notify_subscriptions_removed(realm, user_profile, streams_by_user[user_profile.id])

        event = {
            "type": "mark_stream_messages_as_read",
            "user_profile_id": user_profile.id,
            "stream_recipient_ids": [stream.recipient_id for stream in streams],
        }
        queue_json_publish("deferred_work", event)

    send_peer_remove_events(
        realm=realm,
        streams=streams,
        altered_user_dict=altered_user_dict,
    )

    new_vacant_streams = set(occupied_streams_before) - set(occupied_streams_after)
    new_vacant_private_streams = [stream for stream in new_vacant_streams if stream.invite_only]

    if new_vacant_private_streams:
        # Deactivate any newly-vacant private streams
        for stream in new_vacant_private_streams:
            do_deactivate_stream(stream, acting_user=acting_user)

    return (
        [(sub_info.user, sub_info.stream) for sub_info in subs_to_deactivate],
        not_subscribed,
    )
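
For reference, a hypothetical call site matching the signature above (realm, user_profile, and stream are illustrative variables; the return value pairs each removed (user, stream) with a second list of pairs that were never subscribed):

removed_pairs, not_subscribed_pairs = bulk_remove_subscriptions(
    realm,
    [user_profile],   # users to unsubscribe
    [stream],         # streams to unsubscribe them from
    acting_user=None,
)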