Example 1
    def handle(self, *args: Any, **options: Any) -> None:
        realm = self.get_realm(options)
        assert realm is not None  # Should be ensured by parser

        output_dir = options["output_dir"]
        if output_dir is None:
            output_dir = tempfile.mkdtemp(prefix="zulip-export-")
        else:
            output_dir = os.path.realpath(os.path.expanduser(output_dir))
        if os.path.exists(output_dir):
            shutil.rmtree(output_dir)
        os.makedirs(output_dir)
        print("Exporting realm %s" % (realm.string_id, ))
        num_threads = int(options['threads'])
        if num_threads < 1:
            raise CommandError('You must have at least one thread.')

        # Allows us to trigger exports separately from command line argument parsing
        export_realm_wrapper(
            realm=realm,
            output_dir=output_dir,
            threads=num_threads,
            upload_to_s3=options['upload_to_s3'],
            public_only=options["public_only"],
            delete_after_upload=options["delete_after_upload"])
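
Note that this older revision wipes any existing output directory with shutil.rmtree before recreating it. The later revisions shown in Examples 5, 8, and 9 instead refuse to touch a nonempty directory. A minimal standalone sketch of that safer check, using only the standard library (the helper name is made up for illustration):

    import os
    import tempfile
    from typing import Optional

    def prepare_output_dir(output_dir: Optional[str]) -> str:
        # No path given: fall back to a fresh temporary directory.
        if output_dir is None:
            return tempfile.mkdtemp(prefix="zulip-export-")
        output_dir = os.path.realpath(os.path.expanduser(output_dir))
        if os.path.exists(output_dir):
            # Unlike the shutil.rmtree above, never delete existing data.
            if os.listdir(output_dir):
                raise RuntimeError(f"Refusing to overwrite nonempty directory: {output_dir}")
        else:
            os.makedirs(output_dir)
        return output_dir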
Example 2
    def consume(self, event: Dict[str, Any]) -> None:
        if event['type'] == 'mark_stream_messages_as_read':
            user_profile = get_user_profile_by_id(event['user_profile_id'])
            client = Client.objects.get(id=event['client_id'])

            for stream_id in event['stream_ids']:
                # Since the user just unsubscribed, we don't require
                # an active Subscription object (otherwise, private
                # streams would never be accessible)
                (stream, recipient,
                 sub) = access_stream_by_id(user_profile,
                                            stream_id,
                                            require_active=False)
                do_mark_stream_messages_as_read(user_profile, client, stream)
        elif event['type'] == 'clear_push_device_tokens':
            try:
                clear_push_device_tokens(event["user_profile_id"])
            except PushNotificationBouncerRetryLaterError:

                def failure_processor(event: Dict[str, Any]) -> None:
                    logger.warning(
                        "Maximum retries exceeded for trigger:%s event:clear_push_device_tokens"
                        % (event['user_profile_id'], ))

                retry_event(self.queue_name, event, failure_processor)
        elif event['type'] == 'realm_export':
            start = time.time()
            realm = Realm.objects.get(id=event['realm_id'])
            output_dir = tempfile.mkdtemp(prefix="zulip-export-")

            public_url = export_realm_wrapper(realm=realm,
                                              output_dir=output_dir,
                                              threads=6,
                                              upload=True,
                                              public_only=True,
                                              delete_after_upload=True)
            assert public_url is not None

            # Update the extra_data field now that the export is complete.
            export_event = RealmAuditLog.objects.get(id=event['id'])
            export_event.extra_data = ujson.dumps(
                dict(export_path=urllib.parse.urlparse(public_url).path, ))
            export_event.save(update_fields=['extra_data'])

            # Send a private message notification letting the user who
            # triggered the export know the export finished.
            user_profile = get_user_profile_by_id(event['user_profile_id'])
            content = "Your data export is complete and has been uploaded here:\n\n%s" % (
                public_url, )
            internal_send_private_message(realm=user_profile.realm,
                                          sender=get_system_bot(
                                              settings.NOTIFICATION_BOT),
                                          recipient_user=user_profile,
                                          content=content)

            # For future frontend use, also notify administrator
            # clients that the export happened.
            notify_realm_export(user_profile)
            logging.info("Completed data export for %s in %s" %
                         (user_profile.realm.string_id, time.time() - start))
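
For reference, a self-contained sketch of the extra_data round trip performed above: only the relative path of the uploaded tarball is stored, not the full URL. The URL below is made up purely for illustration.

    import urllib.parse

    import ujson

    public_url = "https://example-bucket.s3.amazonaws.com/exports/zulip-export-abc123.tar.gz"  # illustrative only
    extra_data = ujson.dumps(dict(export_path=urllib.parse.urlparse(public_url).path))
    # Reading it back yields just the relative path:
    assert ujson.loads(extra_data)["export_path"] == "/exports/zulip-export-abc123.tar.gz"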
Example 3
    def consume(self, event: Mapping[str, Any]) -> None:
        if event['type'] == 'mark_stream_messages_as_read':
            user_profile = get_user_profile_by_id(event['user_profile_id'])
            client = Client.objects.get(id=event['client_id'])

            for stream_id in event['stream_ids']:
                # Since the user just unsubscribed, we don't require
                # an active Subscription object (otherwise, private
                # streams would never be accessible)
                (stream, recipient,
                 sub) = access_stream_by_id(user_profile,
                                            stream_id,
                                            require_active=False)
                do_mark_stream_messages_as_read(user_profile, client, stream)
        elif event['type'] == 'realm_export':
            realm = Realm.objects.get(id=event['realm_id'])
            output_dir = tempfile.mkdtemp(prefix="zulip-export-")

            public_url = export_realm_wrapper(realm=realm,
                                              output_dir=output_dir,
                                              threads=6,
                                              upload=True,
                                              public_only=True,
                                              delete_after_upload=True)
            assert public_url is not None

            # Store the relative URL of the export.
            export_event = RealmAuditLog.objects.get(id=event['id'])
            export_event.extra_data = ujson.dumps({
                'export_path': urllib.parse.urlparse(public_url).path,
                'deleted_timestamp': None,
            })
            export_event.save(update_fields=['extra_data'])

            # Send a private message notification letting the user who
            # triggered the export know the export finished.
            user_profile = get_user_profile_by_id(event['user_profile_id'])
            content = "Your data export is complete and has been uploaded here:\n\n%s" % (
                public_url, )
            internal_send_private_message(realm=user_profile.realm,
                                          sender=get_system_bot(
                                              settings.NOTIFICATION_BOT),
                                          recipient_user=user_profile,
                                          content=content)

            # For future frontend use, also notify administrator
            # clients that the export happened.
            notify_realm_export(user_profile)
Example 4
    def consume(self, event: Mapping[str, Any]) -> None:
        if event['type'] == 'mark_stream_messages_as_read':
            user_profile = get_user_profile_by_id(event['user_profile_id'])
            client = Client.objects.get(id=event['client_id'])

            for stream_id in event['stream_ids']:
                # Since the user just unsubscribed, we don't require
                # an active Subscription object (otherwise, private
                # streams would never be accessible)
                (stream, recipient,
                 sub) = access_stream_by_id(user_profile,
                                            stream_id,
                                            require_active=False)
                do_mark_stream_messages_as_read(user_profile, client, stream)
        elif event['type'] == 'realm_exported':
            realm = Realm.objects.get(id=event['realm_id'])
            output_dir = tempfile.mkdtemp(prefix="zulip-export-")

            # TODO: Add support for the LOCAL_UPLOADS_DIR uploads
            # backend in export_realm_wrapper so we don't need this assertion.
            assert settings.LOCAL_UPLOADS_DIR is None

            public_url = export_realm_wrapper(realm=realm,
                                              output_dir=output_dir,
                                              threads=6,
                                              upload=True,
                                              public_only=True,
                                              delete_after_upload=True)
            assert public_url is not None

            # Send a private message notification letting the user who
            # triggered the export know the export finished.
            user_profile = get_user_profile_by_id(event['user_profile_id'])
            content = "Your data export is complete and has been uploaded here:\n\n%s" % (
                public_url, )
            internal_send_private_message(realm=user_profile.realm,
                                          sender=get_system_bot(
                                              settings.NOTIFICATION_BOT),
                                          recipient_user=user_profile,
                                          content=content)

            # For future frontend use, also notify administrator
            # clients that the export happened, including sending the
            # url.
            notify_export_completed(user_profile, public_url)
Example 5
    def handle(self, *args: Any, **options: Any) -> None:
        realm = self.get_realm(options)
        assert realm is not None  # Should be ensured by parser

        output_dir = options["output_dir"]
        public_only = options["public_only"]
        consent_message_id = options["consent_message_id"]

        print(f"\033[94mExporting realm\033[0m: {realm.string_id}")

        num_threads = int(options["threads"])
        if num_threads < 1:
            raise CommandError("You must have at least one thread.")

        if public_only and consent_message_id is not None:
            raise CommandError(
                "Please pass either --public-only or --consent-message-id")

        if options["deactivate_realm"] and realm.deactivated:
            raise CommandError(
                f"The realm {realm.string_id} is already deactivated.  Aborting..."
            )

        if consent_message_id is not None:
            try:
                message = Message.objects.get(id=consent_message_id)
            except Message.DoesNotExist:
                raise CommandError(
                    "Message with given ID does not exist. Aborting...")

            if message.last_edit_time is not None:
                raise CommandError("Message was edited. Aborting...")

            # Since the message might have been sent by
            # Notification Bot, we can't trivially check the realm of
            # the message through message.sender.realm.  So instead we
            # check the realm of the people who reacted to the message
            # (who must all be in the message's realm).
            reactions = Reaction.objects.filter(
                message=message,
                # outbox = 1f4e4
                emoji_code="1f4e4",
                reaction_type="unicode_emoji",
            )
            for reaction in reactions:
                if reaction.user_profile.realm != realm:
                    raise CommandError(
                        "Users from a different realm reacted to message. Aborting..."
                    )

            print(f"\n\033[94mMessage content:\033[0m\n{message.content}\n")

            user_count = (
                UserProfile.objects.filter(
                    realm_id=realm.id,
                    is_active=True,
                    is_bot=False,
                )
                # We exclude guests, because they're not a priority for
                # looking at whether most users are being exported.
                .exclude(role=UserProfile.ROLE_GUEST)
                .count()
            )
            print(
                f"\033[94mNumber of users that reacted outbox:\033[0m {len(reactions)} / {user_count} total non-guest users\n"
            )

            proceed = input("Continue? [y/N] ")
            if proceed.lower() not in ("y", "yes"):
                raise CommandError("Aborting!")

        if output_dir is None:
            output_dir = tempfile.mkdtemp(prefix="zulip-export-")
        else:
            output_dir = os.path.realpath(os.path.expanduser(output_dir))
            if os.path.exists(output_dir):
                if os.listdir(output_dir):
                    raise CommandError(
                        f"Refusing to overwrite nonempty directory: {output_dir}. Aborting...",
                    )
            else:
                os.makedirs(output_dir)

        tarball_path = output_dir.rstrip("/") + ".tar.gz"
        try:
            with open(tarball_path, "x"):
                pass
        except FileExistsError:
            raise CommandError(
                f"Refusing to overwrite existing tarball: {tarball_path}. Aborting..."
            )

        if options["deactivate_realm"]:
            print(f"\033[94mDeactivating realm\033[0m: {realm.string_id}")
            do_deactivate_realm(realm, acting_user=None)

        def percent_callback(bytes_transferred: Any) -> None:
            print(end=".", flush=True)

        # Allows us to trigger exports separately from command line argument parsing
        export_realm_wrapper(
            realm=realm,
            output_dir=output_dir,
            threads=num_threads,
            upload=options["upload"],
            public_only=public_only,
            percent_callback=percent_callback,
            consent_message_id=consent_message_id,
        )
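
The open(tarball_path, "x") probe above is the exclusive-create idiom: it reserves the tarball path up front so an existing file is never silently overwritten. Examples 8 and 9 below spell out the same check with os.open; a minimal equivalent sketch (the helper name is illustrative):

    import os

    def reserve_tarball_path(tarball_path: str) -> None:
        # O_CREAT | O_EXCL raises FileExistsError if the file already exists,
        # giving the same guarantee as open(tarball_path, "x").
        fd = os.open(tarball_path, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o666)
        os.close(fd)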
Example 6
    def consume(self, event: Dict[str, Any]) -> None:
        start = time.time()
        if event["type"] == "mark_stream_messages_as_read":
            user_profile = get_user_profile_by_id(event["user_profile_id"])

            for recipient_id in event["stream_recipient_ids"]:
                count = do_mark_stream_messages_as_read(
                    user_profile, recipient_id)
                logger.info(
                    "Marked %s messages as read for user %s, stream_recipient_id %s",
                    count,
                    user_profile.id,
                    recipient_id,
                )
        elif event["type"] == "mark_stream_messages_as_read_for_everyone":
            # This event is generated by the stream deactivation code path.
            batch_size = 100
            offset = 0
            while True:
                messages = Message.objects.filter(
                    recipient_id=event["stream_recipient_id"]).order_by(
                        "id")[offset:offset + batch_size]
                UserMessage.objects.filter(message__in=messages).extra(
                    where=[UserMessage.where_unread()]).update(
                        flags=F("flags").bitor(UserMessage.flags.read))
                offset += len(messages)
                if len(messages) < batch_size:
                    break
            logger.info(
                "Marked %s messages as read for all users, stream_recipient_id %s",
                offset,
                event["stream_recipient_id"],
            )
        elif event["type"] == "clear_push_device_tokens":
            try:
                clear_push_device_tokens(event["user_profile_id"])
            except PushNotificationBouncerRetryLaterError:

                def failure_processor(event: Dict[str, Any]) -> None:
                    logger.warning(
                        "Maximum retries exceeded for trigger:%s event:clear_push_device_tokens",
                        event["user_profile_id"],
                    )

                retry_event(self.queue_name, event, failure_processor)
        elif event["type"] == "realm_export":
            realm = Realm.objects.get(id=event["realm_id"])
            output_dir = tempfile.mkdtemp(prefix="zulip-export-")
            export_event = RealmAuditLog.objects.get(id=event["id"])
            user_profile = get_user_profile_by_id(event["user_profile_id"])

            try:
                public_url = export_realm_wrapper(
                    realm=realm,
                    output_dir=output_dir,
                    threads=6,
                    upload=True,
                    public_only=True,
                    delete_after_upload=True,
                )
            except Exception:
                export_event.extra_data = orjson.dumps(
                    dict(failed_timestamp=timezone_now().timestamp())
                ).decode()
                export_event.save(update_fields=["extra_data"])
                logging.error(
                    "Data export for %s failed after %s",
                    user_profile.realm.string_id,
                    time.time() - start,
                )
                notify_realm_export(user_profile)
                return

            assert public_url is not None

            # Update the extra_data field now that the export is complete.
            export_event.extra_data = orjson.dumps(
                dict(export_path=urllib.parse.urlparse(public_url).path)
            ).decode()
            export_event.save(update_fields=["extra_data"])

            # Send a private message notification letting the user who
            # triggered the export know the export finished.
            with override_language(user_profile.default_language):
                content = _(
                    "Your data export is complete and has been uploaded here:\n\n{public_url}"
                ).format(public_url=public_url)
            internal_send_private_message(
                sender=get_system_bot(settings.NOTIFICATION_BOT, realm.id),
                recipient_user=user_profile,
                content=content,
            )

            # For future frontend use, also notify administrator
            # clients that the export happened.
            notify_realm_export(user_profile)
            logging.info(
                "Completed data export for %s in %s",
                user_profile.realm.string_id,
                time.time() - start,
            )

        end = time.time()
        logger.info("deferred_work processed %s event (%dms)", event["type"],
                    (end - start) * 1000)
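
For orientation, a hedged sketch of the event payload the realm_export branch above expects, inferred only from the keys it reads (type, realm_id, id, user_profile_id); the values are illustrative, not real data.

    event = {
        "type": "realm_export",
        "realm_id": 1,           # Realm to export
        "id": 42,                # RealmAuditLog row created when the export was requested
        "user_profile_id": 10,   # user who triggered the export and gets notified
    }
    # worker.consume(event)  # would run the export, update the audit log, and notify the user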
Example 7
    def consume(self, event: Dict[str, Any]) -> None:
        if event['type'] == 'mark_stream_messages_as_read':
            user_profile = get_user_profile_by_id(event['user_profile_id'])
            client = Client.objects.get(id=event['client_id'])

            for stream_id in event['stream_ids']:
                # Since the user just unsubscribed, we don't require
                # an active Subscription object (otherwise, private
                # streams would never be accessible)
                (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id,
                                                               require_active=False)
                do_mark_stream_messages_as_read(user_profile, client, stream)
        elif event["type"] == 'mark_stream_messages_as_read_for_everyone':
            # This event is generated by the stream deactivation code path.
            batch_size = 100
            offset = 0
            while True:
                messages = Message.objects.filter(recipient_id=event["stream_recipient_id"]) \
                    .order_by("id")[offset:offset + batch_size]
                UserMessage.objects.filter(message__in=messages).extra(where=[UserMessage.where_unread()]) \
                    .update(flags=F('flags').bitor(UserMessage.flags.read))
                offset += len(messages)
                if len(messages) < batch_size:
                    break
        elif event['type'] == 'clear_push_device_tokens':
            try:
                clear_push_device_tokens(event["user_profile_id"])
            except PushNotificationBouncerRetryLaterError:
                def failure_processor(event: Dict[str, Any]) -> None:
                    logger.warning(
                        "Maximum retries exceeded for trigger:%s event:clear_push_device_tokens",
                        event['user_profile_id'])
                retry_event(self.queue_name, event, failure_processor)
        elif event['type'] == 'realm_export':
            start = time.time()
            realm = Realm.objects.get(id=event['realm_id'])
            output_dir = tempfile.mkdtemp(prefix="zulip-export-")
            export_event = RealmAuditLog.objects.get(id=event['id'])
            user_profile = get_user_profile_by_id(event['user_profile_id'])

            try:
                public_url = export_realm_wrapper(realm=realm, output_dir=output_dir,
                                                  threads=6, upload=True, public_only=True,
                                                  delete_after_upload=True)
            except Exception:
                export_event.extra_data = orjson.dumps(dict(
                    failed_timestamp=timezone_now().timestamp(),
                )).decode()
                export_event.save(update_fields=['extra_data'])
                logging.error(
                    "Data export for %s failed after %s",
                    user_profile.realm.string_id, time.time() - start,
                )
                notify_realm_export(user_profile)
                return

            assert public_url is not None

            # Update the extra_data field now that the export is complete.
            export_event.extra_data = orjson.dumps(dict(
                export_path=urllib.parse.urlparse(public_url).path,
            )).decode()
            export_event.save(update_fields=['extra_data'])

            # Send a private message notification letting the user who
            # triggered the export know the export finished.
            with override_language(user_profile.default_language):
                content = _("Your data export is complete and has been uploaded here:\n\n{public_url}").format(public_url=public_url)
            internal_send_private_message(
                realm=user_profile.realm,
                sender=get_system_bot(settings.NOTIFICATION_BOT),
                recipient_user=user_profile,
                content=content,
            )

            # For future frontend use, also notify administrator
            # clients that the export happened.
            notify_realm_export(user_profile)
            logging.info(
                "Completed data export for %s in %s",
                user_profile.realm.string_id, time.time() - start,
            )
Example 8
    def handle(self, *args: Any, **options: Any) -> None:
        realm = self.get_realm(options)
        assert realm is not None  # Should be ensured by parser

        output_dir = options["output_dir"]
        public_only = options["public_only"]
        consent_message_id = options["consent_message_id"]

        print(f"\033[94mExporting realm\033[0m: {realm.string_id}")

        num_threads = int(options['threads'])
        if num_threads < 1:
            raise CommandError('You must have at least one thread.')

        if public_only and consent_message_id is not None:
            raise CommandError('Please pass either --public-only or --consent-message-id')

        if options["deactivate_realm"] and realm.deactivated:
            raise CommandError(f"The realm {realm.string_id} is already deactivated.  Aborting...")

        if consent_message_id is not None:
            try:
                message = Message.objects.get(id=consent_message_id)
            except Message.DoesNotExist:
                raise CommandError("Message with given ID does not exist. Aborting...")

            if message.last_edit_time is not None:
                raise CommandError("Message was edited. Aborting...")

            # Since the message might have been sent by
            # Notification Bot, we can't trivially check the realm of
            # the message through message.sender.realm.  So instead we
            # check the realm of the people who reacted to the message
            # (who must all be in the message's realm).
            reactions = Reaction.objects.filter(message=message,
                                                # outbox = 1f4e4
                                                emoji_code="1f4e4",
                                                reaction_type="unicode_emoji")
            for reaction in reactions:
                if reaction.user_profile.realm != realm:
                    raise CommandError("Users from a different realm reacted to message. Aborting...")

            print(f"\n\033[94mMessage content:\033[0m\n{message.content}\n")

            user_count = UserProfile.objects.filter(realm_id=realm.id).count()
            print(f"\033[94mNumber of users that reacted outbox:\033[0m {len(reactions)} / {user_count} total users\n")

            proceed = input("Continue? [y/N] ")
            if proceed.lower() not in ('y', 'yes'):
                raise CommandError("Aborting!")

        if output_dir is None:
            output_dir = tempfile.mkdtemp(prefix="zulip-export-")
        else:
            output_dir = os.path.realpath(os.path.expanduser(output_dir))
            if os.path.exists(output_dir):
                if os.listdir(output_dir):
                    raise CommandError(
                        f"Refusing to overwrite nonempty directory: {output_dir}. Aborting...",
                    )
            else:
                os.makedirs(output_dir)

        tarball_path = output_dir.rstrip("/") + ".tar.gz"
        try:
            os.close(os.open(tarball_path, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o666))
        except FileExistsError:
            raise CommandError(f"Refusing to overwrite existing tarball: {tarball_path}. Aborting...")

        if options["deactivate_realm"]:
            print(f"\033[94mDeactivating realm\033[0m: {realm.string_id}")
            do_deactivate_realm(realm)

        def percent_callback(bytes_transferred: Any) -> None:
            sys.stdout.write('.')
            sys.stdout.flush()

        # Allows us to trigger exports separately from command line argument parsing
        export_realm_wrapper(realm=realm, output_dir=output_dir,
                             threads=num_threads, upload=options['upload'],
                             public_only=public_only,
                             delete_after_upload=options["delete_after_upload"],
                             percent_callback=percent_callback,
                             consent_message_id=consent_message_id)
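
Note that the consent denominator above counts every UserProfile in the realm, bots and deactivated accounts included; the revision in Example 5 narrows it to the users whose consent actually matters. A sketch of that stricter count, assuming the same Django models:

    # Count only active, human (non-bot), non-guest accounts in this realm.
    user_count = (
        UserProfile.objects.filter(
            realm_id=realm.id,
            is_active=True,
            is_bot=False,
        )
        .exclude(role=UserProfile.ROLE_GUEST)
        .count()
    )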
Example 9
    def handle(self, *args: Any, **options: Any) -> None:
        realm = self.get_realm(options)
        assert realm is not None  # Should be ensured by parser

        output_dir = options["output_dir"]
        public_only = options["public_only"]
        consent_message_id = options["consent_message_id"]

        if output_dir is None:
            output_dir = tempfile.mkdtemp(prefix="zulip-export-")
        else:
            output_dir = os.path.realpath(os.path.expanduser(output_dir))
            if os.path.exists(output_dir):
                if os.listdir(output_dir):
                    raise CommandError(
                        "Refusing to overwrite nonempty directory: %s. Aborting..."
                        % (output_dir, ))
            else:
                os.makedirs(output_dir)

        tarball_path = output_dir.rstrip("/") + ".tar.gz"
        try:
            os.close(
                os.open(tarball_path, os.O_CREAT | os.O_EXCL | os.O_WRONLY,
                        0o666))
        except FileExistsError:
            raise CommandError(
                "Refusing to overwrite existing tarball: %s. Aborting..." %
                (tarball_path, ))

        print("\033[94mExporting realm\033[0m: %s" % (realm.string_id, ))

        num_threads = int(options['threads'])
        if num_threads < 1:
            raise CommandError('You must have at least one thread.')

        if public_only and consent_message_id is not None:
            raise CommandError(
                'Please pass either --public-only or --consent-message-id')

        if consent_message_id is not None:
            try:
                message = Message.objects.get(id=consent_message_id)
            except Message.DoesNotExist:
                raise CommandError(
                    "Message with given ID does not exist. Aborting...")

            if message.last_edit_time is not None:
                raise CommandError("Message was edited. Aborting...")

            # Since the message might have been sent by
            # Notification Bot, we can't trivially check the realm of
            # the message through message.sender.realm.  So instead we
            # check the realm of the people who reacted to the message
            # (who must all be in the message's realm).
            reactions = Reaction.objects.filter(
                message=message,
                # outbox = 1f4e4
                emoji_code="1f4e4",
                reaction_type="unicode_emoji")
            for reaction in reactions:
                if reaction.user_profile.realm != realm:
                    raise CommandError(
                        "Users from a different realm reacted to message. Aborting..."
                    )

            print("\n\033[94mMessage content:\033[0m\n{}\n".format(
                message.content))

            print("\033[94mNumber of users that reacted outbox:\033[0m {}\n".
                  format(len(reactions)))

        # Allows us to trigger exports separately from command line argument parsing
        export_realm_wrapper(
            realm=realm,
            output_dir=output_dir,
            threads=num_threads,
            upload=options['upload'],
            public_only=public_only,
            delete_after_upload=options["delete_after_upload"],
            consent_message_id=consent_message_id)