Example #1
def do_resend_user_invite_email(prereg_user: PreregistrationUser) -> int:
    # Both of these are structurally guaranteed by the caller's code path.
    assert prereg_user.referred_by is not None
    assert prereg_user.realm is not None

    check_invite_limit(prereg_user.referred_by.realm, 1)

    prereg_user.invited_at = timezone_now()
    prereg_user.save()

    expiry_date = prereg_user.confirmation.get().expiry_date
    if expiry_date is None:
        invite_expires_in_minutes = None
    else:
        # The resent invitation is reset to expire as long after the
        # reminder is sent as it lasted originally.
        invite_expires_in_minutes = (
            expiry_date - prereg_user.invited_at).total_seconds() / 60
    prereg_user.confirmation.clear()

    do_increment_logging_stat(prereg_user.realm,
                              COUNT_STATS["invites_sent::day"], None,
                              prereg_user.invited_at)

    clear_scheduled_invitation_emails(prereg_user.email)
    # We don't store the custom email body, so just set it to None
    event = {
        "prereg_id": prereg_user.id,
        "referrer_id": prereg_user.referred_by.id,
        "email_language": prereg_user.referred_by.realm.default_language,
        "invite_expires_in_minutes": invite_expires_in_minutes,
    }
    queue_json_publish("invites", event)

    return datetime_to_timestamp(prereg_user.invited_at)
Example #2
def do_reactivate_user(user_profile: UserProfile, *,
                       acting_user: Optional[UserProfile]) -> None:
    """Reactivate a user that had previously been deactivated"""
    with transaction.atomic():
        change_user_is_active(user_profile, True)

        event_time = timezone_now()
        RealmAuditLog.objects.create(
            realm=user_profile.realm,
            modified_user=user_profile,
            acting_user=acting_user,
            event_type=RealmAuditLog.USER_REACTIVATED,
            event_time=event_time,
            extra_data=orjson.dumps({
                RealmAuditLog.ROLE_COUNT:
                realm_user_count_by_role(user_profile.realm),
            }).decode(),
        )
        do_increment_logging_stat(
            user_profile.realm,
            COUNT_STATS["active_users_log:is_bot:day"],
            user_profile.is_bot,
            event_time,
        )
        if settings.BILLING_ENABLED:
            update_license_ledger_if_needed(user_profile.realm, event_time)

    notify_created_user(user_profile)

    if user_profile.is_bot:
        notify_created_bot(user_profile)

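    # Recompute the reactivated user's active stream subscriptions and notify
    # other clients, via peer_add events, that this user is a subscriber again.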
    subscribed_recipient_ids = Subscription.objects.filter(
        user_profile_id=user_profile.id,
        active=True,
        recipient__type=Recipient.STREAM).values_list("recipient__type_id",
                                                      flat=True)
    subscribed_streams = Stream.objects.filter(id__in=subscribed_recipient_ids,
                                               deactivated=False)
    subscriber_peer_info = bulk_get_subscriber_peer_info(
        realm=user_profile.realm,
        streams=subscribed_streams,
    )

    altered_user_dict: Dict[int, Set[int]] = defaultdict(set)
    for stream in subscribed_streams:
        altered_user_dict[stream.id] = {user_profile.id}

    stream_dict = {stream.id: stream for stream in subscribed_streams}

    send_peer_subscriber_events(
        op="peer_add",
        realm=user_profile.realm,
        altered_user_dict=altered_user_dict,
        stream_dict=stream_dict,
        private_peer_dict=subscriber_peer_info.private_peer_dict,
    )
Example #3
def do_mark_stream_messages_as_read(
    user_profile: UserProfile, stream_recipient_id: int, topic_name: Optional[str] = None
) -> int:
    log_statsd_event("mark_stream_as_read")

    msgs = UserMessage.objects.filter(
        user_profile=user_profile,
    )

    msgs = msgs.filter(message__recipient_id=stream_recipient_id)

    if topic_name:
        msgs = filter_by_topic_name_via_message(
            query=msgs,
            topic_name=topic_name,
        )

    msgs = msgs.extra(
        where=[UserMessage.where_unread()],
    )

    message_ids = list(msgs.values_list("message_id", flat=True))

    count = msgs.update(
        flags=F("flags").bitor(UserMessage.flags.read),
    )

    event = asdict(
        ReadMessagesEvent(
            messages=message_ids,
            all=False,
        )
    )
    event_time = timezone_now()

    send_event(user_profile.realm, event, [user_profile.id])
    do_clear_mobile_push_notifications_for_ids([user_profile.id], message_ids)

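    # Log how many messages were marked as read; the interactions stat counts
    # at most one read interaction per call.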
    do_increment_logging_stat(
        user_profile, COUNT_STATS["messages_read::hour"], None, event_time, increment=count
    )
    do_increment_logging_stat(
        user_profile,
        COUNT_STATS["messages_read_interactions::hour"],
        None,
        event_time,
        increment=min(1, count),
    )
    return count
Example #4
def do_activate_mirror_dummy_user(user_profile: UserProfile, *,
                                  acting_user: Optional[UserProfile]) -> None:
    """Called to have a user "take over" a "mirror dummy" user
    (i.e. is_mirror_dummy=True) account when they sign up with the
    same email address.

    Essentially, the result should be as though we had created the
    UserProfile just now with do_create_user, except that the mirror
    dummy user may appear as the recipient or sender of messages from
    before their account was fully created.

    TODO: This function likely has bugs resulting from this being a
    parallel code path to do_create_user; e.g. it likely does not
    handle preferences or default streams properly.
    """
    with transaction.atomic():
        change_user_is_active(user_profile, True)
        user_profile.is_mirror_dummy = False
        user_profile.set_unusable_password()
        user_profile.date_joined = timezone_now()
        user_profile.tos_version = settings.TERMS_OF_SERVICE_VERSION
        user_profile.save(update_fields=[
            "date_joined", "password", "is_mirror_dummy", "tos_version"
        ])

        event_time = user_profile.date_joined
        RealmAuditLog.objects.create(
            realm=user_profile.realm,
            modified_user=user_profile,
            acting_user=acting_user,
            event_type=RealmAuditLog.USER_ACTIVATED,
            event_time=event_time,
            extra_data=orjson.dumps({
                RealmAuditLog.ROLE_COUNT:
                realm_user_count_by_role(user_profile.realm),
            }).decode(),
        )
        do_increment_logging_stat(
            user_profile.realm,
            COUNT_STATS["active_users_log:is_bot:day"],
            user_profile.is_bot,
            event_time,
        )
        if settings.BILLING_ENABLED:
            update_license_ledger_if_needed(user_profile.realm, event_time)

    notify_created_user(user_profile)
Example #5
    def test_frequency(self):
        # type: () -> None
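        # The offsets 0, 1, 61, and 24*60+1 minutes before TIME_ZERO span the
        # current hour, the previous hour, and the previous day, so the
        # assertions below check that rows are bucketed by each stat's frequency.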
        times = [self.TIME_ZERO - self.MINUTE*i for i in [0, 1, 61, 24*60+1]]

        stat = LoggingCountStat('day test', RealmCount, CountStat.DAY)
        for time_ in times:
            do_increment_logging_stat(self.default_realm, stat, None, time_)
        stat = LoggingCountStat('hour test', RealmCount, CountStat.HOUR)
        for time_ in times:
            do_increment_logging_stat(self.default_realm, stat, None, time_)

        self.assertTableState(RealmCount, ['value', 'property', 'end_time'],
                              [[3, 'day test', self.TIME_ZERO],
                               [1, 'day test', self.TIME_ZERO - self.DAY],
                               [2, 'hour test', self.TIME_ZERO],
                               [1, 'hour test', self.TIME_LAST_HOUR],
                               [1, 'hour test', self.TIME_ZERO - self.DAY]])
Example #6
def do_mark_all_as_read(user_profile: UserProfile) -> int:
    log_statsd_event("bankruptcy")

    # First, we clear mobile push notifications.  This is safer in the
    # event that the below logic times out and we're killed.
    all_push_message_ids = (
        UserMessage.objects.filter(
            user_profile=user_profile,
        )
        .extra(
            where=[UserMessage.where_active_push_notification()],
        )
        .values_list("message_id", flat=True)[0:10000]
    )
    do_clear_mobile_push_notifications_for_ids([user_profile.id], all_push_message_ids)

    msgs = UserMessage.objects.filter(user_profile=user_profile).extra(
        where=[UserMessage.where_unread()],
    )

    count = msgs.update(
        flags=F("flags").bitor(UserMessage.flags.read),
    )

    event = asdict(
        ReadMessagesEvent(
            messages=[],  # we don't send messages, since the client reloads anyway
            all=True,
        )
    )
    event_time = timezone_now()

    send_event(user_profile.realm, event, [user_profile.id])

    do_increment_logging_stat(
        user_profile, COUNT_STATS["messages_read::hour"], None, event_time, increment=count
    )
    do_increment_logging_stat(
        user_profile,
        COUNT_STATS["messages_read_interactions::hour"],
        None,
        event_time,
        increment=min(1, count),
    )

    return count
Example #7
    def test_aggregation(self):
        # type: () -> None
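        # do_increment_logging_stat writes to each stat's own table; running
        # process_count_stat then aggregates those rows up to RealmCount and
        # InstallationCount.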
        stat = LoggingCountStat('realm test', RealmCount, CountStat.DAY)
        do_increment_logging_stat(self.default_realm, stat, None,
                                  self.TIME_ZERO)
        process_count_stat(stat, self.TIME_ZERO)

        user = self.create_user()
        stat = LoggingCountStat('user test', UserCount, CountStat.DAY)
        do_increment_logging_stat(user, stat, None, self.TIME_ZERO)
        process_count_stat(stat, self.TIME_ZERO)

        stream = self.create_stream_with_recipient()[0]
        stat = LoggingCountStat('stream test', StreamCount, CountStat.DAY)
        do_increment_logging_stat(stream, stat, None, self.TIME_ZERO)
        process_count_stat(stat, self.TIME_ZERO)

        self.assertTableState(
            InstallationCount, ['property', 'value'],
            [['realm test', 1], ['user test', 1], ['stream test', 1]])
        self.assertTableState(
            RealmCount, ['property', 'value'],
            [['realm test', 1], ['user test', 1], ['stream test', 1]])
        self.assertTableState(UserCount, ['property', 'value'],
                              [['user test', 1]])
        self.assertTableState(StreamCount, ['property', 'value'],
                              [['stream test', 1]])
Example #8
def do_mark_muted_user_messages_as_read(
    user_profile: UserProfile,
    muted_user: UserProfile,
) -> int:
    messages = UserMessage.objects.filter(
        user_profile=user_profile,
        message__sender=muted_user).extra(where=[UserMessage.where_unread()])

    message_ids = list(messages.values_list("message_id", flat=True))

    if len(message_ids) == 0:
        return 0

    count = messages.update(flags=F("flags").bitor(UserMessage.flags.read))

    event = asdict(ReadMessagesEvent(
        messages=message_ids,
        all=False,
    ))
    event_time = timezone_now()

    send_event(user_profile.realm, event, [user_profile.id])
    do_clear_mobile_push_notifications_for_ids([user_profile.id], message_ids)

    do_increment_logging_stat(user_profile,
                              COUNT_STATS["messages_read::hour"],
                              None,
                              event_time,
                              increment=count)
    do_increment_logging_stat(
        user_profile,
        COUNT_STATS["messages_read_interactions::hour"],
        None,
        event_time,
        increment=min(1, count),
    )
    return count
Example #9
    def test_increment(self):
        # type: () -> None
        stat = LoggingCountStat('test', RealmCount, CountStat.DAY)
        self.current_property = 'test'
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO, increment=-1)
        self.assertTableState(RealmCount, ['value'], [[-1]])
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO, increment=3)
        self.assertTableState(RealmCount, ['value'], [[2]])
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO)
        self.assertTableState(RealmCount, ['value'], [[3]])
Example #10
    def test_get_or_create(self):
        # type: () -> None
        stat = LoggingCountStat('test', RealmCount, CountStat.HOUR)
        # All these should trigger the create part of get_or_create.
        # property is tested in test_frequency, and id_args are tested in test_id_args,
        # so this only tests a new subgroup and end_time
        do_increment_logging_stat(self.default_realm, stat, 'subgroup1', self.TIME_ZERO)
        do_increment_logging_stat(self.default_realm, stat, 'subgroup2', self.TIME_ZERO)
        do_increment_logging_stat(self.default_realm, stat, 'subgroup1', self.TIME_LAST_HOUR)
        self.current_property = 'test'
        self.assertTableState(RealmCount, ['value', 'subgroup', 'end_time'],
                              [[1, 'subgroup1', self.TIME_ZERO], [1, 'subgroup2', self.TIME_ZERO],
                               [1, 'subgroup1', self.TIME_LAST_HOUR]])
        # This should trigger the get part of get_or_create
        do_increment_logging_stat(self.default_realm, stat, 'subgroup1', self.TIME_ZERO)
        self.assertTableState(RealmCount, ['value', 'subgroup', 'end_time'],
                              [[2, 'subgroup1', self.TIME_ZERO], [1, 'subgroup2', self.TIME_ZERO],
                               [1, 'subgroup1', self.TIME_LAST_HOUR]])
Example #11
    def test_aggregation(self):
        # type: () -> None
        stat = LoggingCountStat('realm test', RealmCount, CountStat.DAY)
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO)
        process_count_stat(stat, self.TIME_ZERO)

        user = self.create_user()
        stat = LoggingCountStat('user test', UserCount, CountStat.DAY)
        do_increment_logging_stat(user, stat, None, self.TIME_ZERO)
        process_count_stat(stat, self.TIME_ZERO)

        stream = self.create_stream_with_recipient()[0]
        stat = LoggingCountStat('stream test', StreamCount, CountStat.DAY)
        do_increment_logging_stat(stream, stat, None, self.TIME_ZERO)
        process_count_stat(stat, self.TIME_ZERO)

        self.assertTableState(InstallationCount, ['property', 'value'],
                              [['realm test', 1], ['user test', 1], ['stream test', 1]])
        self.assertTableState(RealmCount, ['property', 'value'],
                              [['realm test', 1], ['user test', 1], ['stream test', 1]])
        self.assertTableState(UserCount, ['property', 'value'], [['user test', 1]])
        self.assertTableState(StreamCount, ['property', 'value'], [['stream test', 1]])
Example #12
def do_deactivate_user(
    user_profile: UserProfile, _cascade: bool = True, *, acting_user: Optional[UserProfile]
) -> None:
    if not user_profile.is_active:
        return

    if _cascade:
        # We need to deactivate bots before the target user, to ensure
        # that a failure partway through this function cannot result
        # in only the user being deactivated.
        bot_profiles = get_active_bots_owned_by_user(user_profile)
        for profile in bot_profiles:
            do_deactivate_user(profile, _cascade=False, acting_user=acting_user)

    with transaction.atomic():
        if user_profile.realm.is_zephyr_mirror_realm:  # nocoverage
            # For zephyr mirror users, we need to make them a mirror dummy
            # again; otherwise, other users won't get the correct behavior
            # when trying to send messages to this person inside Zulip.
            #
            # Ideally, we need to also ensure their zephyr mirroring bot
            # isn't running, but that's a separate issue.
            user_profile.is_mirror_dummy = True
            user_profile.save(update_fields=["is_mirror_dummy"])

        change_user_is_active(user_profile, False)

        clear_scheduled_emails(user_profile.id)
        revoke_invites_generated_by_user(user_profile)

        event_time = timezone_now()
        RealmAuditLog.objects.create(
            realm=user_profile.realm,
            modified_user=user_profile,
            acting_user=acting_user,
            event_type=RealmAuditLog.USER_DEACTIVATED,
            event_time=event_time,
            extra_data=orjson.dumps(
                {
                    RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
                }
            ).decode(),
        )
        do_increment_logging_stat(
            user_profile.realm,
            COUNT_STATS["active_users_log:is_bot:day"],
            user_profile.is_bot,
            event_time,
            increment=-1,
        )
        if settings.BILLING_ENABLED:
            update_license_ledger_if_needed(user_profile.realm, event_time)

    delete_user_sessions(user_profile)
    event = dict(
        type="realm_user",
        op="remove",
        person=dict(user_id=user_profile.id, full_name=user_profile.full_name),
    )
    send_event(user_profile.realm, event, active_user_ids(user_profile.realm_id))

    if user_profile.is_bot:
        event = dict(
            type="realm_bot",
            op="remove",
            bot=dict(user_id=user_profile.id, full_name=user_profile.full_name),
        )
        send_event(user_profile.realm, event, bot_owner_user_ids(user_profile))
Example #13
def do_update_message_flags(
    user_profile: UserProfile, operation: str, flag: str, messages: List[int]
) -> int:
    valid_flags = [item for item in UserMessage.flags if item not in UserMessage.NON_API_FLAGS]
    if flag not in valid_flags:
        raise JsonableError(_("Invalid flag: '{}'").format(flag))
    if flag in UserMessage.NON_EDITABLE_FLAGS:
        raise JsonableError(_("Flag not editable: '{}'").format(flag))
    if operation not in ("add", "remove"):
        raise JsonableError(_("Invalid message flag operation: '{}'").format(operation))
    flagattr = getattr(UserMessage.flags, flag)

    msgs = UserMessage.objects.filter(user_profile=user_profile, message_id__in=messages)
    um_message_ids = {um.message_id for um in msgs}
    historical_message_ids = list(set(messages) - um_message_ids)

    # Users can mutate flags for messages that don't have a UserMessage yet.
    # First, validate that the user is even allowed to access these message_ids.
    for message_id in historical_message_ids:
        access_message(user_profile, message_id)

    # And then create historical UserMessage records.  See the called function for more context.
    create_historical_user_messages(user_id=user_profile.id, message_ids=historical_message_ids)

    if operation == "add":
        count = msgs.update(flags=F("flags").bitor(flagattr))
    elif operation == "remove":
        count = msgs.update(flags=F("flags").bitand(~flagattr))

    event = {
        "type": "update_message_flags",
        "op": operation,
        "operation": operation,
        "flag": flag,
        "messages": messages,
        "all": False,
    }

    if flag == "read" and operation == "remove":
        # When removing the read flag (i.e. marking messages as
        # unread), extend the event with an additional object with
        # details on the messages required to update the client's
        # `unread_msgs` data structure.
        raw_unread_data = get_raw_unread_data(user_profile, messages)
        event["message_details"] = format_unread_message_details(user_profile.id, raw_unread_data)

    send_event(user_profile.realm, event, [user_profile.id])

    if flag == "read" and operation == "add":
        event_time = timezone_now()
        do_clear_mobile_push_notifications_for_ids([user_profile.id], messages)

        do_increment_logging_stat(
            user_profile, COUNT_STATS["messages_read::hour"], None, event_time, increment=count
        )
        do_increment_logging_stat(
            user_profile,
            COUNT_STATS["messages_read_interactions::hour"],
            None,
            event_time,
            increment=min(1, count),
        )

    return count
Example #14
    def test_table_and_id_args(self):
        # type: () -> None
        # For realms, streams, and users, tests that the new rows are going to
        # the appropriate *Count table, and that using a different zerver_object
        # results in a new row being created
        self.current_property = 'test'
        second_realm = Realm.objects.create(string_id='moo', name='moo', domain='moo')
        stat = LoggingCountStat('test', RealmCount, CountStat.DAY)
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO)
        do_increment_logging_stat(second_realm, stat, None, self.TIME_ZERO)
        self.assertTableState(RealmCount, ['realm'], [[self.default_realm], [second_realm]])

        user1 = self.create_user()
        user2 = self.create_user()
        stat = LoggingCountStat('test', UserCount, CountStat.DAY)
        do_increment_logging_stat(user1, stat, None, self.TIME_ZERO)
        do_increment_logging_stat(user2, stat, None, self.TIME_ZERO)
        self.assertTableState(UserCount, ['user'], [[user1], [user2]])

        stream1 = self.create_stream_with_recipient()[0]
        stream2 = self.create_stream_with_recipient()[0]
        stat = LoggingCountStat('test', StreamCount, CountStat.DAY)
        do_increment_logging_stat(stream1, stat, None, self.TIME_ZERO)
        do_increment_logging_stat(stream2, stat, None, self.TIME_ZERO)
        self.assertTableState(StreamCount, ['stream'], [[stream1], [stream2]])
Example #15
def do_create_user(
    email: str,
    password: Optional[str],
    realm: Realm,
    full_name: str,
    bot_type: Optional[int] = None,
    role: Optional[int] = None,
    bot_owner: Optional[UserProfile] = None,
    tos_version: Optional[str] = None,
    timezone: str = "",
    avatar_source: str = UserProfile.AVATAR_FROM_GRAVATAR,
    default_language: str = "en",
    default_sending_stream: Optional[Stream] = None,
    default_events_register_stream: Optional[Stream] = None,
    default_all_public_streams: Optional[bool] = None,
    prereg_user: Optional[PreregistrationUser] = None,
    default_stream_groups: Sequence[DefaultStreamGroup] = [],
    source_profile: Optional[UserProfile] = None,
    realm_creation: bool = False,
    *,
    acting_user: Optional[UserProfile],
    enable_marketing_emails: bool = True,
) -> UserProfile:
    with transaction.atomic():
        user_profile = create_user(
            email=email,
            password=password,
            realm=realm,
            full_name=full_name,
            role=role,
            bot_type=bot_type,
            bot_owner=bot_owner,
            tos_version=tos_version,
            timezone=timezone,
            avatar_source=avatar_source,
            default_language=default_language,
            default_sending_stream=default_sending_stream,
            default_events_register_stream=default_events_register_stream,
            default_all_public_streams=default_all_public_streams,
            source_profile=source_profile,
            enable_marketing_emails=enable_marketing_emails,
        )

        event_time = user_profile.date_joined
        if not acting_user:
            acting_user = user_profile
        RealmAuditLog.objects.create(
            realm=user_profile.realm,
            acting_user=acting_user,
            modified_user=user_profile,
            event_type=RealmAuditLog.USER_CREATED,
            event_time=event_time,
            extra_data=orjson.dumps({
                RealmAuditLog.ROLE_COUNT:
                realm_user_count_by_role(user_profile.realm),
            }).decode(),
        )

        if realm_creation:
            # If this user just created a realm, make sure they are
            # properly tagged as the creator of the realm.
            realm_creation_audit_log = (RealmAuditLog.objects.filter(
                event_type=RealmAuditLog.REALM_CREATED,
                realm=realm).order_by("id").last())
            assert realm_creation_audit_log is not None
            realm_creation_audit_log.acting_user = user_profile
            realm_creation_audit_log.save(update_fields=["acting_user"])

        do_increment_logging_stat(
            user_profile.realm,
            COUNT_STATS["active_users_log:is_bot:day"],
            user_profile.is_bot,
            event_time,
        )
        if settings.BILLING_ENABLED:
            update_license_ledger_if_needed(user_profile.realm, event_time)

        system_user_group = get_system_user_group_for_user(user_profile)
        UserGroupMembership.objects.create(user_profile=user_profile,
                                           user_group=system_user_group)

        if user_profile.role == UserProfile.ROLE_MEMBER and not user_profile.is_provisional_member:
            full_members_system_group = UserGroup.objects.get(
                name="@role:fullmembers",
                realm=user_profile.realm,
                is_system_group=True)
            UserGroupMembership.objects.create(
                user_profile=user_profile,
                user_group=full_members_system_group)

    # Note that for bots, the caller will send an additional event
    # with bot-specific info like services.
    notify_created_user(user_profile)

    do_send_user_group_members_update_event("add_members", system_user_group,
                                            [user_profile.id])
    if user_profile.role == UserProfile.ROLE_MEMBER and not user_profile.is_provisional_member:
        do_send_user_group_members_update_event("add_members",
                                                full_members_system_group,
                                                [user_profile.id])

    if bot_type is None:
        process_new_human_user(
            user_profile,
            prereg_user=prereg_user,
            default_stream_groups=default_stream_groups,
            realm_creation=realm_creation,
        )

    if realm_creation:
        assert realm.signup_notifications_stream is not None
        bulk_add_subscriptions(realm, [realm.signup_notifications_stream],
                               [user_profile],
                               acting_user=None)

        from zerver.lib.onboarding import send_initial_realm_messages

        send_initial_realm_messages(realm)

    return user_profile
Example #16
def do_invite_users(
    user_profile: UserProfile,
    invitee_emails: Collection[str],
    streams: Collection[Stream],
    *,
    invite_expires_in_minutes: Optional[int],
    invite_as: int = PreregistrationUser.INVITE_AS["MEMBER"],
) -> None:
    num_invites = len(invitee_emails)

    check_invite_limit(user_profile.realm, num_invites)
    if settings.BILLING_ENABLED:
        from corporate.lib.registration import check_spare_licenses_available_for_inviting_new_users

        check_spare_licenses_available_for_inviting_new_users(
            user_profile.realm, num_invites)

    realm = user_profile.realm
    if not realm.invite_required:
        # Inhibit joining an open realm to send spam invitations.
        min_age = datetime.timedelta(days=settings.INVITES_MIN_USER_AGE_DAYS)
        if (
            user_profile.date_joined > timezone_now() - min_age
            and not user_profile.is_realm_admin
        ):
            raise InvitationError(
                _("Your account is too new to send invites for this organization. "
                  "Ask an organization admin, or a more experienced user."),
                [],
                sent_invitations=False,
            )

    good_emails: Set[str] = set()
    errors: List[Tuple[str, str, bool]] = []
    validate_email_allowed_in_realm = get_realm_email_validator(
        user_profile.realm)
    for email in invitee_emails:
        if email == "":
            continue
        email_error = validate_email_is_valid(
            email,
            validate_email_allowed_in_realm,
        )

        if email_error:
            errors.append((email, email_error, False))
        else:
            good_emails.add(email)
    """
    good_emails are emails that look ok so far,
    but we still need to make sure they're not
    gonna conflict with existing users
    """
    error_dict = get_existing_user_errors(user_profile.realm, good_emails)

    skipped: List[Tuple[str, str, bool]] = []
    for email in error_dict:
        msg, deactivated = error_dict[email]
        skipped.append((email, msg, deactivated))
        good_emails.remove(email)

    validated_emails = list(good_emails)

    if errors:
        raise InvitationError(
            _("Some emails did not validate, so we didn't send any invitations."
              ),
            errors + skipped,
            sent_invitations=False,
        )

    if skipped and len(skipped) == len(invitee_emails):
        # All e-mails were skipped, so we didn't actually invite anyone.
        raise InvitationError(_("We weren't able to invite anyone."),
                              skipped,
                              sent_invitations=False)

    # We do this here rather than in the invite queue processor since this
    # is used for rate limiting invitations, rather than keeping track of
    # when exactly invitations were sent
    do_increment_logging_stat(
        user_profile.realm,
        COUNT_STATS["invites_sent::day"],
        None,
        timezone_now(),
        increment=len(validated_emails),
    )

    # Now that we are past all the possible errors, we actually create
    # the PreregistrationUser objects and trigger the email invitations.
    for email in validated_emails:
        # The logged in user is the referrer.
        prereg_user = PreregistrationUser(email=email,
                                          referred_by=user_profile,
                                          invited_as=invite_as,
                                          realm=user_profile.realm)
        prereg_user.save()
        stream_ids = [stream.id for stream in streams]
        prereg_user.streams.set(stream_ids)

        event = {
            "prereg_id": prereg_user.id,
            "referrer_id": user_profile.id,
            "email_language": user_profile.realm.default_language,
            "invite_expires_in_minutes": invite_expires_in_minutes,
        }
        queue_json_publish("invites", event)

    if skipped:
        raise InvitationError(
            _("Some of those addresses are already using Zulip, "
              "so we didn't send them an invitation. We did send "
              "invitations to everyone else!"),
            skipped,
            sent_invitations=True,
        )
    notify_invites_changed(user_profile.realm)
Example #17
    def test_table_and_id_args(self):
        # type: () -> None
        # For realms, streams, and users, tests that the new rows are going to
        # the appropriate *Count table, and that using a different zerver_object
        # results in a new row being created
        self.current_property = 'test'
        second_realm = Realm.objects.create(string_id='moo',
                                            name='moo',
                                            domain='moo')
        stat = LoggingCountStat('test', RealmCount, CountStat.DAY)
        do_increment_logging_stat(self.default_realm, stat, None,
                                  self.TIME_ZERO)
        do_increment_logging_stat(second_realm, stat, None, self.TIME_ZERO)
        self.assertTableState(RealmCount, ['realm'],
                              [[self.default_realm], [second_realm]])

        user1 = self.create_user()
        user2 = self.create_user()
        stat = LoggingCountStat('test', UserCount, CountStat.DAY)
        do_increment_logging_stat(user1, stat, None, self.TIME_ZERO)
        do_increment_logging_stat(user2, stat, None, self.TIME_ZERO)
        self.assertTableState(UserCount, ['user'], [[user1], [user2]])

        stream1 = self.create_stream_with_recipient()[0]
        stream2 = self.create_stream_with_recipient()[0]
        stat = LoggingCountStat('test', StreamCount, CountStat.DAY)
        do_increment_logging_stat(stream1, stat, None, self.TIME_ZERO)
        do_increment_logging_stat(stream2, stat, None, self.TIME_ZERO)
        self.assertTableState(StreamCount, ['stream'], [[stream1], [stream2]])