Example #1
    def consume(self, event: Mapping[str, Any]) -> None:
        if event['type'] == 'mark_stream_messages_as_read':
            user_profile = get_user_profile_by_id(event['user_profile_id'])

            for stream_id in event['stream_ids']:
                (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id)
                do_mark_stream_messages_as_read(user_profile, stream)
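
The event argument here is just a JSON-decoded dict pulled off the queue. A minimal payload that would exercise this branch looks like the following (the IDs are hypothetical, chosen for illustration):

event = {
    'type': 'mark_stream_messages_as_read',
    'user_profile_id': 42,
    'stream_ids': [7, 13],
}
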
Example #2
    def consume(self, event: Dict[str, Any]) -> None:
        if event['type'] == 'mark_stream_messages_as_read':
            user_profile = get_user_profile_by_id(event['user_profile_id'])
            client = Client.objects.get(id=event['client_id'])

            for stream_id in event['stream_ids']:
                # Since the user just unsubscribed, we don't require
                # an active Subscription object (otherwise, private
                # streams would never be accessible)
                (stream, recipient, sub) = access_stream_by_id(
                    user_profile, stream_id, require_active=False)
                do_mark_stream_messages_as_read(user_profile, client, stream)
        elif event['type'] == 'clear_push_device_tokens':
            try:
                clear_push_device_tokens(event["user_profile_id"])
            except PushNotificationBouncerRetryLaterError:

                def failure_processor(event: Dict[str, Any]) -> None:
                    logger.warning(
                        "Maximum retries exceeded for trigger:%s event:clear_push_device_tokens"
                        % (event['user_profile_id'], ))

                retry_event(self.queue_name, event, failure_processor)
        elif event['type'] == 'realm_export':
            start = time.time()
            realm = Realm.objects.get(id=event['realm_id'])
            output_dir = tempfile.mkdtemp(prefix="zulip-export-")

            public_url = export_realm_wrapper(realm=realm,
                                              output_dir=output_dir,
                                              threads=6,
                                              upload=True,
                                              public_only=True,
                                              delete_after_upload=True)
            assert public_url is not None

            # Update the extra_data field now that the export is complete.
            export_event = RealmAuditLog.objects.get(id=event['id'])
            export_event.extra_data = ujson.dumps(
                dict(export_path=urllib.parse.urlparse(public_url).path))
            export_event.save(update_fields=['extra_data'])

            # Send a private message notification letting the user who
            # triggered the export know the export finished.
            user_profile = get_user_profile_by_id(event['user_profile_id'])
            content = "Your data export is complete and has been uploaded here:\n\n%s" % (
                public_url, )
            internal_send_private_message(realm=user_profile.realm,
                                          sender=get_system_bot(
                                              settings.NOTIFICATION_BOT),
                                          recipient_user=user_profile,
                                          content=content)

            # For future frontend use, also notify administrator
            # clients that the export happened.
            notify_realm_export(user_profile)
            logging.info("Completed data export for %s in %s" %
                         (user_profile.realm.string_id, time.time() - start))
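
When the push bouncer raises PushNotificationBouncerRetryLaterError, the event is handed to retry_event for re-delivery. A rough sketch of what such a retry helper typically does (an illustration of the pattern, not Zulip's actual implementation; publish and MAX_RETRIES are hypothetical stand-ins for the real queue machinery):

from typing import Any, Callable, Dict

MAX_RETRIES = 3  # hypothetical cap on delivery attempts

def retry_event_sketch(queue_name: str,
                       event: Dict[str, Any],
                       failure_processor: Callable[[Dict[str, Any]], None],
                       publish: Callable[[str, Dict[str, Any]], None]) -> None:
    # Track how many times this event has already failed.
    event['failed_tries'] = event.get('failed_tries', 0) + 1
    if event['failed_tries'] > MAX_RETRIES:
        # Give up; let the caller log or record the permanent failure.
        failure_processor(event)
    else:
        # Re-enqueue the event for another delivery attempt.
        publish(queue_name, event)
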
Example #3
    def consume(self, event: Mapping[str, Any]) -> None:
        if event['type'] == 'mark_stream_messages_as_read':
            user_profile = get_user_profile_by_id(event['user_profile_id'])

            for stream_id in event['stream_ids']:
                # Since the user just unsubscribed, we don't require
                # an active Subscription object (otherwise, private
                # streams would never be accessible)
                (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id,
                                                               require_active=False)
                do_mark_stream_messages_as_read(user_profile, stream)
Example #5
    def consume(self, event: Mapping[str, Any]) -> None:
        if event['type'] == 'mark_stream_messages_as_read':
            user_profile = get_user_profile_by_id(event['user_profile_id'])
            client = Client.objects.get(id=event['client_id'])

            for stream_id in event['stream_ids']:
                # Since the user just unsubscribed, we don't require
                # an active Subscription object (otherwise, private
                # streams would never be accessible)
                (stream, recipient, sub) = access_stream_by_id(
                    user_profile, stream_id, require_active=False)
                do_mark_stream_messages_as_read(user_profile, client, stream)
        elif event['type'] == 'realm_export':
            realm = Realm.objects.get(id=event['realm_id'])
            output_dir = tempfile.mkdtemp(prefix="zulip-export-")

            public_url = export_realm_wrapper(realm=realm,
                                              output_dir=output_dir,
                                              threads=6,
                                              upload=True,
                                              public_only=True,
                                              delete_after_upload=True)
            assert public_url is not None

            # Store the relative URL of the export.
            export_event = RealmAuditLog.objects.get(id=event['id'])
            export_event.extra_data = ujson.dumps({
                'export_path': urllib.parse.urlparse(public_url).path,
                'deleted_timestamp': None,
            })
            export_event.save(update_fields=['extra_data'])

            # Send a private message notification letting the user who
            # triggered the export know the export finished.
            user_profile = get_user_profile_by_id(event['user_profile_id'])
            content = "Your data export is complete and has been uploaded here:\n\n%s" % (
                public_url, )
            internal_send_private_message(realm=user_profile.realm,
                                          sender=get_system_bot(
                                              settings.NOTIFICATION_BOT),
                                          recipient_user=user_profile,
                                          content=content)

            # For future frontend use, also notify administrator
            # clients that the export happened.
            notify_realm_export(user_profile)
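
Note how this version seeds 'deleted_timestamp': None into extra_data alongside the export path, so a later deletion code path can fill in the timestamp without changing the JSON shape. A hypothetical consumer of that field might look like this (the helper name and error handling are illustrative, not part of the original code):

import ujson

def get_export_path(export_event: 'RealmAuditLog') -> str:
    # Decode the payload written above and refuse deleted exports.
    data = ujson.loads(export_event.extra_data)
    if data.get('deleted_timestamp') is not None:
        raise ValueError('export has been deleted')
    return data['export_path']
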
Example #6
def mark_topic_as_read(
        request: HttpRequest,
        user_profile: UserProfile,
        stream_id: int = REQ(json_validator=check_int),
        topic_name: str = REQ(),
) -> HttpResponse:
    stream, sub = access_stream_by_id(user_profile, stream_id)

    if topic_name:
        topic_exists = user_message_exists_for_topic(
            user_profile=user_profile,
            recipient_id=stream.recipient_id,
            topic_name=topic_name,
        )

        if not topic_exists:
            raise JsonableError(_("No such topic '{}'").format(topic_name))

    count = do_mark_stream_messages_as_read(user_profile, stream.recipient_id,
                                            topic_name)

    log_data_str = f"[{count} updated]"
    log_data = RequestNotes.get_notes(request).log_data
    assert log_data is not None
    log_data["extra"] = log_data_str

    return json_success(request)
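
Unlike the older variants below, this version keeps its logging breadcrumbs in RequestNotes rather than writing to request._log_data directly. A minimal sketch of that per-request notes pattern (illustrative only, not Zulip's actual RequestNotes implementation):

from dataclasses import dataclass
from typing import Dict, Optional

@dataclass
class NotesSketch:
    # Per-request scratch space for middleware and view bookkeeping.
    log_data: Optional[Dict[str, str]] = None

def get_notes_sketch(request: 'HttpRequest') -> NotesSketch:
    # Lazily attach a notes object to the request on first access.
    if not hasattr(request, '_notes_sketch'):
        request._notes_sketch = NotesSketch(log_data={})
    return request._notes_sketch
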
Example #7
def mark_topic_as_read(
    request: HttpRequest,
    user_profile: UserProfile,
    stream_id: int = REQ(validator=check_int),
    topic_name: str = REQ()
) -> HttpResponse:
    stream, sub = access_stream_by_id(user_profile, stream_id)

    if topic_name:
        topic_exists = user_message_exists_for_topic(
            user_profile=user_profile,
            recipient_id=stream.recipient_id,
            topic_name=topic_name,
        )

        if not topic_exists:
            raise JsonableError(_('No such topic \'{}\'').format(topic_name))

    count = do_mark_stream_messages_as_read(user_profile, stream.recipient_id,
                                            topic_name)

    log_data_str = f"[{count} updated]"
    request._log_data["extra"] = log_data_str

    return json_success({'result': 'success', 'msg': ''})
Example #8
    def consume(self, event: Mapping[str, Any]) -> None:
        if event['type'] == 'mark_stream_messages_as_read':
            user_profile = get_user_profile_by_id(event['user_profile_id'])
            client = Client.objects.get(id=event['client_id'])

            for stream_id in event['stream_ids']:
                # Since the user just unsubscribed, we don't require
                # an active Subscription object (otherwise, private
                # streams would never be accessible)
                (stream, recipient, sub) = access_stream_by_id(
                    user_profile, stream_id, require_active=False)
                do_mark_stream_messages_as_read(user_profile, client, stream)
        elif event['type'] == 'realm_exported':
            realm = Realm.objects.get(id=event['realm_id'])
            output_dir = tempfile.mkdtemp(prefix="zulip-export-")

            # TODO: Add support for the LOCAL_UPLOADS_DIR uploads
            # backend in export_realm_wrapper so we don't need this assertion.
            assert settings.LOCAL_UPLOADS_DIR is None

            public_url = export_realm_wrapper(realm=realm,
                                              output_dir=output_dir,
                                              threads=6,
                                              upload=True,
                                              public_only=True,
                                              delete_after_upload=True)
            assert public_url is not None

            # Send a private message notification letting the user who
            # triggered the export know the export finished.
            user_profile = get_user_profile_by_id(event['user_profile_id'])
            content = "Your data export is complete and has been uploaded here:\n\n%s" % (
                public_url, )
            internal_send_private_message(realm=user_profile.realm,
                                          sender=get_system_bot(
                                              settings.NOTIFICATION_BOT),
                                          recipient_user=user_profile,
                                          content=content)

            # For future frontend use, also notify administrator
            # clients that the export happened, including sending the
            # url.
            notify_export_completed(user_profile, public_url)
Example #9
def mark_stream_as_read(
    request: HttpRequest, user_profile: UserProfile, stream_id: int = REQ(json_validator=check_int)
) -> HttpResponse:
    stream, sub = access_stream_by_id(user_profile, stream_id)
    count = do_mark_stream_messages_as_read(user_profile, stream.recipient_id)

    log_data_str = f"[{count} updated]"
    request._log_data["extra"] = log_data_str

    return json_success({"result": "success", "msg": ""})
Example #10
def mark_stream_as_read(
    request: HttpRequest, user_profile: UserProfile, stream_id: int = REQ(json_validator=check_int)
) -> HttpResponse:
    stream, sub = access_stream_by_id(user_profile, stream_id)
    count = do_mark_stream_messages_as_read(user_profile, stream.recipient_id)

    log_data_str = f"[{count} updated]"
    log_data = RequestNotes.get_notes(request).log_data
    assert log_data is not None
    log_data["extra"] = log_data_str

    return json_success(request)
Example #11
def mark_stream_as_read(
    request: HttpRequest,
    user_profile: UserProfile,
    stream_id: int = REQ(validator=check_int)
) -> HttpResponse:
    stream, recipient, sub = access_stream_by_id(user_profile, stream_id)
    count = do_mark_stream_messages_as_read(user_profile, request.client,
                                            stream)

    log_data_str = f"[{count} updated]"
    request._log_data["extra"] = log_data_str

    return json_success({'result': 'success', 'msg': ''})
Example #12
    def consume(self, event: Dict[str, Any]) -> None:
        start = time.time()
        if event["type"] == "mark_stream_messages_as_read":
            user_profile = get_user_profile_by_id(event["user_profile_id"])

            for recipient_id in event["stream_recipient_ids"]:
                count = do_mark_stream_messages_as_read(
                    user_profile, recipient_id)
                logger.info(
                    "Marked %s messages as read for user %s, stream_recipient_id %s",
                    count,
                    user_profile.id,
                    recipient_id,
                )
        elif event["type"] == "mark_stream_messages_as_read_for_everyone":
            # This event is generated by the stream deactivation code path.
            batch_size = 100
            offset = 0
            while True:
                messages = Message.objects.filter(
                    recipient_id=event["stream_recipient_id"]).order_by(
                        "id")[offset:offset + batch_size]
                UserMessage.objects.filter(message__in=messages).extra(
                    where=[UserMessage.where_unread()]).update(
                        flags=F("flags").bitor(UserMessage.flags.read))
                offset += len(messages)
                if len(messages) < batch_size:
                    break
            logger.info(
                "Marked %s messages as read for all users, stream_recipient_id %s",
                offset,
                event["stream_recipient_id"],
            )
        elif event["type"] == "clear_push_device_tokens":
            try:
                clear_push_device_tokens(event["user_profile_id"])
            except PushNotificationBouncerRetryLaterError:

                def failure_processor(event: Dict[str, Any]) -> None:
                    logger.warning(
                        "Maximum retries exceeded for trigger:%s event:clear_push_device_tokens",
                        event["user_profile_id"],
                    )

                retry_event(self.queue_name, event, failure_processor)
        elif event["type"] == "realm_export":
            realm = Realm.objects.get(id=event["realm_id"])
            output_dir = tempfile.mkdtemp(prefix="zulip-export-")
            export_event = RealmAuditLog.objects.get(id=event["id"])
            user_profile = get_user_profile_by_id(event["user_profile_id"])

            try:
                public_url = export_realm_wrapper(
                    realm=realm,
                    output_dir=output_dir,
                    threads=6,
                    upload=True,
                    public_only=True,
                    delete_after_upload=True,
                )
            except Exception:
                export_event.extra_data = orjson.dumps(
                    dict(failed_timestamp=timezone_now().timestamp())
                ).decode()
                export_event.save(update_fields=["extra_data"])
                logging.error(
                    "Data export for %s failed after %s",
                    user_profile.realm.string_id,
                    time.time() - start,
                )
                notify_realm_export(user_profile)
                return

            assert public_url is not None

            # Update the extra_data field now that the export is complete.
            export_event.extra_data = orjson.dumps(
                dict(export_path=urllib.parse.urlparse(public_url).path)
            ).decode()
            export_event.save(update_fields=["extra_data"])

            # Send a private message notification letting the user who
            # triggered the export know the export finished.
            with override_language(user_profile.default_language):
                content = _(
                    "Your data export is complete and has been uploaded here:\n\n{public_url}"
                ).format(public_url=public_url)
            internal_send_private_message(
                sender=get_system_bot(settings.NOTIFICATION_BOT, realm.id),
                recipient_user=user_profile,
                content=content,
            )

            # For future frontend use, also notify administrator
            # clients that the export happened.
            notify_realm_export(user_profile)
            logging.info(
                "Completed data export for %s in %s",
                user_profile.realm.string_id,
                time.time() - start,
            )

        end = time.time()
        logger.info("deferred_work processed %s event (%dms)", event["type"],
                    (end - start) * 1000)
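
The mark_stream_messages_as_read_for_everyone branch above walks the stream's messages in fixed-size, id-ordered slices, so deactivating a huge stream never loads every row at once; the offsets stay stable because the UPDATE touches UserMessage while the slicing happens on Message. The same chunking idiom in isolation (handle is a hypothetical callback standing in for the bulk UPDATE):

from django.db.models import QuerySet

def process_in_batches(qs: QuerySet, handle, batch_size: int = 100) -> int:
    # Walk `qs` in fixed-size, id-ordered windows, handing each window
    # to `handle` (e.g. to run a bulk UPDATE against a related table).
    offset = 0
    while True:
        window = qs.order_by('id')[offset:offset + batch_size]
        handle(window)
        offset += len(window)  # len() evaluates the slice and caches it
        if len(window) < batch_size:
            return offset
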
Example #13
    def consume(self, event: Dict[str, Any]) -> None:
        if event['type'] == 'mark_stream_messages_as_read':
            user_profile = get_user_profile_by_id(event['user_profile_id'])
            client = Client.objects.get(id=event['client_id'])

            for stream_id in event['stream_ids']:
                # Since the user just unsubscribed, we don't require
                # an active Subscription object (otherwise, private
                # streams would never be accessible)
                (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id,
                                                               require_active=False)
                do_mark_stream_messages_as_read(user_profile, client, stream)
        elif event["type"] == 'mark_stream_messages_as_read_for_everyone':
            # This event is generated by the stream deactivation code path.
            batch_size = 100
            offset = 0
            while True:
                messages = Message.objects.filter(recipient_id=event["stream_recipient_id"]) \
                    .order_by("id")[offset:offset + batch_size]
                UserMessage.objects.filter(message__in=messages).extra(where=[UserMessage.where_unread()]) \
                    .update(flags=F('flags').bitor(UserMessage.flags.read))
                offset += len(messages)
                if len(messages) < batch_size:
                    break
        elif event['type'] == 'clear_push_device_tokens':
            try:
                clear_push_device_tokens(event["user_profile_id"])
            except PushNotificationBouncerRetryLaterError:
                def failure_processor(event: Dict[str, Any]) -> None:
                    logger.warning(
                        "Maximum retries exceeded for trigger:%s event:clear_push_device_tokens",
                        event['user_profile_id'])
                retry_event(self.queue_name, event, failure_processor)
        elif event['type'] == 'realm_export':
            start = time.time()
            realm = Realm.objects.get(id=event['realm_id'])
            output_dir = tempfile.mkdtemp(prefix="zulip-export-")
            export_event = RealmAuditLog.objects.get(id=event['id'])
            user_profile = get_user_profile_by_id(event['user_profile_id'])

            try:
                public_url = export_realm_wrapper(realm=realm, output_dir=output_dir,
                                                  threads=6, upload=True, public_only=True,
                                                  delete_after_upload=True)
            except Exception:
                export_event.extra_data = orjson.dumps(dict(
                    failed_timestamp=timezone_now().timestamp(),
                )).decode()
                export_event.save(update_fields=['extra_data'])
                logging.error(
                    "Data export for %s failed after %s",
                    user_profile.realm.string_id, time.time() - start,
                )
                notify_realm_export(user_profile)
                return

            assert public_url is not None

            # Update the extra_data field now that the export is complete.
            export_event.extra_data = orjson.dumps(dict(
                export_path=urllib.parse.urlparse(public_url).path,
            )).decode()
            export_event.save(update_fields=['extra_data'])

            # Send a private message notification letting the user who
            # triggered the export know the export finished.
            with override_language(user_profile.default_language):
                content = _("Your data export is complete and has been uploaded here:\n\n{public_url}").format(public_url=public_url)
            internal_send_private_message(
                realm=user_profile.realm,
                sender=get_system_bot(settings.NOTIFICATION_BOT),
                recipient_user=user_profile,
                content=content,
            )

            # For future frontend use, also notify administrator
            # clients that the export happened.
            notify_realm_export(user_profile)
            logging.info(
                "Completed data export for %s in %s",
                user_profile.realm.string_id, time.time() - start,
            )
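
For completeness, events like these enter the worker through Zulip's queue publisher; kicking off a realm export might look like the following (the queue name comes from the log line in Example #12, while the IDs are placeholders for real Realm, UserProfile, and RealmAuditLog rows):

from zerver.lib.queue import queue_json_publish

queue_json_publish(
    'deferred_work',
    {
        'type': 'realm_export',
        'realm_id': 1,          # Realm.id to export
        'user_profile_id': 42,  # user to notify when the export finishes
        'id': 123,              # RealmAuditLog row the worker updates
    },
)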