Example #1
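A deferred_work queue consumer from Zulip. The consume() handler dispatches on event['type']: it marks a freshly-unsubscribed user's stream messages as read, clears push device tokens (re-queueing the event via retry_event when the push bouncer asks for a retry later), or runs a public-data realm export, records the result in the RealmAuditLog row, and notifies the requesting user.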
    def consume(self, event: Dict[str, Any]) -> None:
        if event['type'] == 'mark_stream_messages_as_read':
            user_profile = get_user_profile_by_id(event['user_profile_id'])
            client = Client.objects.get(id=event['client_id'])

            for stream_id in event['stream_ids']:
                # Since the user just unsubscribed, we don't require
                # an active Subscription object (otherwise, private
                # streams would never be accessible)
                (stream, recipient, sub) = access_stream_by_id(
                    user_profile, stream_id, require_active=False)
                do_mark_stream_messages_as_read(user_profile, client, stream)
        elif event['type'] == 'clear_push_device_tokens':
            try:
                clear_push_device_tokens(event["user_profile_id"])
            except PushNotificationBouncerRetryLaterError:

                def failure_processor(event: Dict[str, Any]) -> None:
                    logger.warning(
                        "Maximum retries exceeded for trigger:%s event:clear_push_device_tokens"
                        % (event['user_profile_id'], ))

                retry_event(self.queue_name, event, failure_processor)
        elif event['type'] == 'realm_export':
            start = time.time()
            realm = Realm.objects.get(id=event['realm_id'])
            output_dir = tempfile.mkdtemp(prefix="zulip-export-")

            public_url = export_realm_wrapper(realm=realm,
                                              output_dir=output_dir,
                                              threads=6,
                                              upload=True,
                                              public_only=True,
                                              delete_after_upload=True)
            assert public_url is not None

            # Update the extra_data field now that the export is complete.
            export_event = RealmAuditLog.objects.get(id=event['id'])
            export_event.extra_data = ujson.dumps(
                dict(export_path=urllib.parse.urlparse(public_url).path))
            export_event.save(update_fields=['extra_data'])

            # Send a private message notification letting the user who
            # triggered the export know the export finished.
            user_profile = get_user_profile_by_id(event['user_profile_id'])
            content = "Your data export is complete and has been uploaded here:\n\n%s" % (
                public_url, )
            internal_send_private_message(realm=user_profile.realm,
                                          sender=get_system_bot(
                                              settings.NOTIFICATION_BOT),
                                          recipient_user=user_profile,
                                          content=content)

            # For future frontend use, also notify administrator
            # clients that the export happened.
            notify_realm_export(user_profile)
            logging.info("Completed data export for %s in %s",
                         user_profile.realm.string_id, time.time() - start)
Example #2
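The producer side: the export_realm view rate-limits export requests to EXPORT_LIMIT per rolling week, rejects realms whose message history or upload usage exceeds the self-serve limits, creates the RealmAuditLog row, and then publishes a realm_export event to the deferred_work queue so the long-running export does not tie up the request worker.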
def export_realm(request: HttpRequest, user: UserProfile) -> HttpResponse:
    # Currently only supports public-data-only exports.
    event_type = RealmAuditLog.REALM_EXPORTED
    event_time = timezone_now()
    realm = user.realm
    EXPORT_LIMIT = 5
    # Conservative limit on the size of message history in
    # organizations being exported; this exists to protect Zulip
    # against a possible unmonitored accidental DoS caused by trying
    # to export an organization with huge history.
    MAX_MESSAGE_HISTORY = 250000
    MAX_UPLOAD_QUOTA = 10 * 1024 * 1024 * 1024

    # Filter based upon the number of events that have occurred in the delta
    # If we are at the limit, the incoming request is rejected
    event_time_delta = event_time - timedelta(days=7)
    limit_check = RealmAuditLog.objects.filter(
        realm=realm, event_type=event_type, event_time__gte=event_time_delta
    )
    if len(limit_check) >= EXPORT_LIMIT:
        raise JsonableError(_("Exceeded rate limit."))

    total_messages = sum(
        realm_count.value
        for realm_count in RealmCount.objects.filter(
            realm=user.realm, property="messages_sent:client:day"
        )
    )
    if (
        total_messages > MAX_MESSAGE_HISTORY
        or user.realm.currently_used_upload_space_bytes() > MAX_UPLOAD_QUOTA
    ):
        raise JsonableError(
            _("Please request a manual export from {email}.").format(
                email=settings.ZULIP_ADMINISTRATOR,
            )
        )

    row = RealmAuditLog.objects.create(
        realm=realm, event_type=event_type, event_time=event_time, acting_user=user
    )

    # Allow for UI updates on a pending export
    notify_realm_export(user)

    # Using the deferred_work queue processor to avoid
    # killing the process after 60s
    event = {
        "type": "realm_export",
        "time": event_time,
        "realm_id": realm.id,
        "user_profile_id": user.id,
        "id": row.id,
    }
    queue_json_publish("deferred_work", event)
    return json_success()
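A side note on the rate-limit check above: len(limit_check) fetches every matching audit-log row into memory just to count them. A count-based variant (a sketch, not code from the project) pushes the counting into the database:

# Sketch only: same semantics as the len()-based check above, but
# COUNT(*) runs in the database instead of materializing the rows.
export_count = RealmAuditLog.objects.filter(
    realm=realm, event_type=event_type, event_time__gte=event_time_delta,
).count()
if export_count >= EXPORT_LIMIT:
    raise JsonableError(_("Exceeded rate limit."))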
Example #3
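A variant of the consumer in Example #1: the clear_push_device_tokens branch and the timing log are gone, and the audit-log extra_data additionally stores a deleted_timestamp placeholder alongside the export path.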
    def consume(self, event: Mapping[str, Any]) -> None:
        if event['type'] == 'mark_stream_messages_as_read':
            user_profile = get_user_profile_by_id(event['user_profile_id'])
            client = Client.objects.get(id=event['client_id'])

            for stream_id in event['stream_ids']:
                # Since the user just unsubscribed, we don't require
                # an active Subscription object (otherwise, private
                # streams would never be accessible)
                (stream, recipient, sub) = access_stream_by_id(
                    user_profile, stream_id, require_active=False)
                do_mark_stream_messages_as_read(user_profile, client, stream)
        elif event['type'] == 'realm_export':
            realm = Realm.objects.get(id=event['realm_id'])
            output_dir = tempfile.mkdtemp(prefix="zulip-export-")

            public_url = export_realm_wrapper(realm=realm,
                                              output_dir=output_dir,
                                              threads=6,
                                              upload=True,
                                              public_only=True,
                                              delete_after_upload=True)
            assert public_url is not None

            # Store the relative URL of the export.
            export_event = RealmAuditLog.objects.get(id=event['id'])
            export_event.extra_data = ujson.dumps({
                'export_path': urllib.parse.urlparse(public_url).path,
                'deleted_timestamp': None,
            })
            export_event.save(update_fields=['extra_data'])

            # Send a private message notification letting the user who
            # triggered the export know the export finished.
            user_profile = get_user_profile_by_id(event['user_profile_id'])
            content = "Your data export is complete and has been uploaded here:\n\n%s" % (
                public_url, )
            internal_send_private_message(realm=user_profile.realm,
                                          sender=get_system_bot(
                                              settings.NOTIFICATION_BOT),
                                          recipient_user=user_profile,
                                          content=content)

            # For future frontend use, also notify administrator
            # clients that the export happened.
            notify_realm_export(user_profile)
Example #4
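A later revision of the consumer: it serializes extra_data with orjson, handles mark_stream_messages_as_read_for_everyone (generated on stream deactivation) by updating UserMessage flags in batches, records a failed_timestamp if the export raises, translates the notification into the user's language, and logs how long each event took to process.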
    def consume(self, event: Dict[str, Any]) -> None:
        start = time.time()
        if event["type"] == "mark_stream_messages_as_read":
            user_profile = get_user_profile_by_id(event["user_profile_id"])

            for recipient_id in event["stream_recipient_ids"]:
                count = do_mark_stream_messages_as_read(
                    user_profile, recipient_id)
                logger.info(
                    "Marked %s messages as read for user %s, stream_recipient_id %s",
                    count,
                    user_profile.id,
                    recipient_id,
                )
        elif event["type"] == "mark_stream_messages_as_read_for_everyone":
            # This event is generated by the stream deactivation code path.
            batch_size = 100
            offset = 0
            while True:
                messages = Message.objects.filter(
                    recipient_id=event["stream_recipient_id"]).order_by(
                        "id")[offset:offset + batch_size]
                UserMessage.objects.filter(message__in=messages).extra(
                    where=[UserMessage.where_unread()]).update(
                        flags=F("flags").bitor(UserMessage.flags.read))
                offset += len(messages)
                if len(messages) < batch_size:
                    break
            logger.info(
                "Marked %s messages as read for all users, stream_recipient_id %s",
                offset,
                event["stream_recipient_id"],
            )
        elif event["type"] == "clear_push_device_tokens":
            try:
                clear_push_device_tokens(event["user_profile_id"])
            except PushNotificationBouncerRetryLaterError:

                def failure_processor(event: Dict[str, Any]) -> None:
                    logger.warning(
                        "Maximum retries exceeded for trigger:%s event:clear_push_device_tokens",
                        event["user_profile_id"],
                    )

                retry_event(self.queue_name, event, failure_processor)
        elif event["type"] == "realm_export":
            realm = Realm.objects.get(id=event["realm_id"])
            output_dir = tempfile.mkdtemp(prefix="zulip-export-")
            export_event = RealmAuditLog.objects.get(id=event["id"])
            user_profile = get_user_profile_by_id(event["user_profile_id"])

            try:
                public_url = export_realm_wrapper(
                    realm=realm,
                    output_dir=output_dir,
                    threads=6,
                    upload=True,
                    public_only=True,
                    delete_after_upload=True,
                )
            except Exception:
                export_event.extra_data = orjson.dumps(
                    dict(failed_timestamp=timezone_now().timestamp())).decode()
                export_event.save(update_fields=["extra_data"])
                logging.error(
                    "Data export for %s failed after %s",
                    user_profile.realm.string_id,
                    time.time() - start,
                )
                notify_realm_export(user_profile)
                return

            assert public_url is not None

            # Update the extra_data field now that the export is complete.
            export_event.extra_data = orjson.dumps(
                dict(export_path=urllib.parse.urlparse(public_url).path)).decode()
            export_event.save(update_fields=["extra_data"])

            # Send a private message notification letting the user who
            # triggered the export know the export finished.
            with override_language(user_profile.default_language):
                content = _(
                    "Your data export is complete and has been uploaded here:\n\n{public_url}"
                ).format(public_url=public_url)
            internal_send_private_message(
                sender=get_system_bot(settings.NOTIFICATION_BOT, realm.id),
                recipient_user=user_profile,
                content=content,
            )

            # For future frontend use, also notify administrator
            # clients that the export happened.
            notify_realm_export(user_profile)
            logging.info(
                "Completed data export for %s in %s",
                user_profile.realm.string_id,
                time.time() - start,
            )

        end = time.time()
        logger.info("deferred_work processed %s event (%dms)", event["type"],
                    (end - start) * 1000)
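The mark_stream_messages_as_read_for_everyone branch above pages through messages with an OFFSET-style slice, which the database re-evaluates from the start of the result set on every iteration. A keyset variant (a sketch reusing the models and helpers visible in the example, not the project's code) pages by message id instead:

# Sketch only: keyset pagination by message id; each query seeks past
# the last id seen instead of re-scanning an ever-growing OFFSET.
last_id = 0
while True:
    batch = list(
        Message.objects.filter(
            recipient_id=event["stream_recipient_id"], id__gt=last_id,
        ).order_by("id")[:batch_size]
    )
    if not batch:
        break
    UserMessage.objects.filter(message__in=batch).extra(
        where=[UserMessage.where_unread()],
    ).update(flags=F("flags").bitor(UserMessage.flags.read))
    last_id = batch[-1].id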
Example #5
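Essentially the same logic as Example #4 with different line wrapping; the notable functional differences are that internal_send_private_message is still called with an explicit realm argument and get_system_bot takes only the bot's email, without a realm id.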
    def consume(self, event: Dict[str, Any]) -> None:
        if event['type'] == 'mark_stream_messages_as_read':
            user_profile = get_user_profile_by_id(event['user_profile_id'])
            client = Client.objects.get(id=event['client_id'])

            for stream_id in event['stream_ids']:
                # Since the user just unsubscribed, we don't require
                # an active Subscription object (otherwise, private
                # streams would never be accessible)
                (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id,
                                                               require_active=False)
                do_mark_stream_messages_as_read(user_profile, client, stream)
        elif event["type"] == 'mark_stream_messages_as_read_for_everyone':
            # This event is generated by the stream deactivation code path.
            batch_size = 100
            offset = 0
            while True:
                messages = Message.objects.filter(
                    recipient_id=event["stream_recipient_id"],
                ).order_by("id")[offset:offset + batch_size]
                UserMessage.objects.filter(message__in=messages).extra(
                    where=[UserMessage.where_unread()],
                ).update(flags=F('flags').bitor(UserMessage.flags.read))
                offset += len(messages)
                if len(messages) < batch_size:
                    break
        elif event['type'] == 'clear_push_device_tokens':
            try:
                clear_push_device_tokens(event["user_profile_id"])
            except PushNotificationBouncerRetryLaterError:
                def failure_processor(event: Dict[str, Any]) -> None:
                    logger.warning(
                        "Maximum retries exceeded for trigger:%s event:clear_push_device_tokens",
                        event['user_profile_id'])
                retry_event(self.queue_name, event, failure_processor)
        elif event['type'] == 'realm_export':
            start = time.time()
            realm = Realm.objects.get(id=event['realm_id'])
            output_dir = tempfile.mkdtemp(prefix="zulip-export-")
            export_event = RealmAuditLog.objects.get(id=event['id'])
            user_profile = get_user_profile_by_id(event['user_profile_id'])

            try:
                public_url = export_realm_wrapper(realm=realm, output_dir=output_dir,
                                                  threads=6, upload=True, public_only=True,
                                                  delete_after_upload=True)
            except Exception:
                export_event.extra_data = orjson.dumps(dict(
                    failed_timestamp=timezone_now().timestamp(),
                )).decode()
                export_event.save(update_fields=['extra_data'])
                logging.error(
                    "Data export for %s failed after %s",
                    user_profile.realm.string_id, time.time() - start,
                )
                notify_realm_export(user_profile)
                return

            assert public_url is not None

            # Update the extra_data field now that the export is complete.
            export_event.extra_data = orjson.dumps(dict(
                export_path=urllib.parse.urlparse(public_url).path,
            )).decode()
            export_event.save(update_fields=['extra_data'])

            # Send a private message notification letting the user who
            # triggered the export know the export finished.
            with override_language(user_profile.default_language):
                content = _("Your data export is complete and has been uploaded here:\n\n{public_url}").format(public_url=public_url)
            internal_send_private_message(
                realm=user_profile.realm,
                sender=get_system_bot(settings.NOTIFICATION_BOT),
                recipient_user=user_profile,
                content=content,
            )

            # For future frontend use, also notify administrator
            # clients that the export happened.
            notify_realm_export(user_profile)
            logging.info(
                "Completed data export for %s in %s",
                user_profile.realm.string_id, time.time() - start,
            )
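For completeness, a producer-side sketch for the unsubscribe path (a hypothetical call site; the event keys mirror what the consumers above read, and queue_json_publish appears in Example #2):

# Hypothetical: client, user_profile, and streams are assumed to be in
# scope at the (unshown) unsubscribe call site.
event = {
    'type': 'mark_stream_messages_as_read',
    'client_id': client.id,
    'user_profile_id': user_profile.id,
    'stream_ids': [stream.id for stream in streams],
}
queue_json_publish("deferred_work", event)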