def consume(self, event: Dict[str, Any]) -> None:
    if event['type'] == 'mark_stream_messages_as_read':
        user_profile = get_user_profile_by_id(event['user_profile_id'])
        client = Client.objects.get(id=event['client_id'])

        for stream_id in event['stream_ids']:
            # Since the user just unsubscribed, we don't require
            # an active Subscription object (otherwise, private
            # streams would never be accessible)
            (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id,
                                                           require_active=False)
            do_mark_stream_messages_as_read(user_profile, client, stream)
    elif event['type'] == 'clear_push_device_tokens':
        try:
            clear_push_device_tokens(event["user_profile_id"])
        except PushNotificationBouncerRetryLaterError:
            def failure_processor(event: Dict[str, Any]) -> None:
                logger.warning(
                    "Maximum retries exceeded for trigger:%s event:clear_push_device_tokens" % (
                        event['user_profile_id'],))
            retry_event(self.queue_name, event, failure_processor)
    elif event['type'] == 'realm_export':
        start = time.time()
        realm = Realm.objects.get(id=event['realm_id'])
        output_dir = tempfile.mkdtemp(prefix="zulip-export-")

        public_url = export_realm_wrapper(realm=realm, output_dir=output_dir,
                                          threads=6, upload=True, public_only=True,
                                          delete_after_upload=True)
        assert public_url is not None

        # Update the extra_data field now that the export is complete.
        export_event = RealmAuditLog.objects.get(id=event['id'])
        export_event.extra_data = ujson.dumps(dict(
            export_path=urllib.parse.urlparse(public_url).path,
        ))
        export_event.save(update_fields=['extra_data'])

        # Send a private message notification letting the user who
        # triggered the export know the export finished.
        user_profile = get_user_profile_by_id(event['user_profile_id'])
        content = "Your data export is complete and has been uploaded here:\n\n%s" % (
            public_url,)
        internal_send_private_message(
            realm=user_profile.realm,
            sender=get_system_bot(settings.NOTIFICATION_BOT),
            recipient_user=user_profile,
            content=content,
        )

        # For future frontend use, also notify administrator
        # clients that the export happened.
        notify_realm_export(user_profile)
        logging.info("Completed data export for %s in %s" % (
            user_profile.realm.string_id, time.time() - start))
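
# Illustrative only: a sketch of the producer side for this revision,
# assuming the standard queue_json_publish() helper and the "deferred_work"
# queue name (consistent with the log line in the later revision below).
# The dict keys mirror the lookups in consume() above; all ids are
# hypothetical placeholders.
from zerver.lib.queue import queue_json_publish

event = {
    "type": "mark_stream_messages_as_read",
    "user_profile_id": 10,   # UserProfile id of the user who unsubscribed
    "client_id": 11,         # Client row id, required by this older revision
    "stream_ids": [7, 8],    # streams the user just unsubscribed from
}
queue_json_publish("deferred_work", event)

event = {
    "type": "realm_export",
    "realm_id": 1,
    "user_profile_id": 10,
    "id": 42,  # id of the RealmAuditLog row created when the export was requested
}
queue_json_publish("deferred_work", event)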
def consume(self, event: Dict[str, Any]) -> None:
    start = time.time()
    if event["type"] == "mark_stream_messages_as_read":
        user_profile = get_user_profile_by_id(event["user_profile_id"])

        for recipient_id in event["stream_recipient_ids"]:
            count = do_mark_stream_messages_as_read(user_profile, recipient_id)
            logger.info(
                "Marked %s messages as read for user %s, stream_recipient_id %s",
                count,
                user_profile.id,
                recipient_id,
            )
    elif event["type"] == "mark_stream_messages_as_read_for_everyone":
        # This event is generated by the stream deactivation code path.
        batch_size = 100
        offset = 0
        while True:
            messages = Message.objects.filter(
                recipient_id=event["stream_recipient_id"]
            ).order_by("id")[offset:offset + batch_size]
            UserMessage.objects.filter(message__in=messages).extra(
                where=[UserMessage.where_unread()]
            ).update(flags=F("flags").bitor(UserMessage.flags.read))
            offset += len(messages)
            if len(messages) < batch_size:
                break
        logger.info(
            "Marked %s messages as read for all users, stream_recipient_id %s",
            offset,
            event["stream_recipient_id"],
        )
    elif event["type"] == "clear_push_device_tokens":
        try:
            clear_push_device_tokens(event["user_profile_id"])
        except PushNotificationBouncerRetryLaterError:
            def failure_processor(event: Dict[str, Any]) -> None:
                logger.warning(
                    "Maximum retries exceeded for trigger:%s event:clear_push_device_tokens",
                    event["user_profile_id"],
                )
            retry_event(self.queue_name, event, failure_processor)
    elif event["type"] == "realm_export":
        realm = Realm.objects.get(id=event["realm_id"])
        output_dir = tempfile.mkdtemp(prefix="zulip-export-")
        export_event = RealmAuditLog.objects.get(id=event["id"])
        user_profile = get_user_profile_by_id(event["user_profile_id"])

        try:
            public_url = export_realm_wrapper(
                realm=realm,
                output_dir=output_dir,
                threads=6,
                upload=True,
                public_only=True,
                delete_after_upload=True,
            )
        except Exception:
            export_event.extra_data = orjson.dumps(
                dict(
                    failed_timestamp=timezone_now().timestamp(),
                )
            ).decode()
            export_event.save(update_fields=["extra_data"])
            logging.error(
                "Data export for %s failed after %s",
                user_profile.realm.string_id,
                time.time() - start,
            )
            notify_realm_export(user_profile)
            return

        assert public_url is not None

        # Update the extra_data field now that the export is complete.
        export_event.extra_data = orjson.dumps(
            dict(
                export_path=urllib.parse.urlparse(public_url).path,
            )
        ).decode()
        export_event.save(update_fields=["extra_data"])

        # Send a private message notification letting the user who
        # triggered the export know the export finished.
        with override_language(user_profile.default_language):
            content = _(
                "Your data export is complete and has been uploaded here:\n\n{public_url}"
            ).format(public_url=public_url)
        internal_send_private_message(
            sender=get_system_bot(settings.NOTIFICATION_BOT, realm.id),
            recipient_user=user_profile,
            content=content,
        )

        # For future frontend use, also notify administrator
        # clients that the export happened.
        notify_realm_export(user_profile)
        logging.info(
            "Completed data export for %s in %s",
            user_profile.realm.string_id,
            time.time() - start,
        )

    end = time.time()
    logger.info(
        "deferred_work processed %s event (%dms)",
        event["type"],
        (end - start) * 1000,
    )
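
# Illustrative only: in this revision the unsubscribe path passes Recipient
# ids rather than Stream ids, so the worker needs neither a Client row nor
# access_stream_by_id.  A sketch of the corresponding producer-side payload,
# again assuming queue_json_publish() and hypothetical ids:
from zerver.lib.queue import queue_json_publish

event = {
    "type": "mark_stream_messages_as_read",
    "user_profile_id": 10,
    # Note the key is now stream_recipient_ids (Recipient ids), not
    # stream_ids, and no client_id is required.
    "stream_recipient_ids": [21, 22],
}
queue_json_publish("deferred_work", event)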
def consume(self, event: Dict[str, Any]) -> None:
    if event['type'] == 'mark_stream_messages_as_read':
        user_profile = get_user_profile_by_id(event['user_profile_id'])
        client = Client.objects.get(id=event['client_id'])

        for stream_id in event['stream_ids']:
            # Since the user just unsubscribed, we don't require
            # an active Subscription object (otherwise, private
            # streams would never be accessible)
            (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id,
                                                           require_active=False)
            do_mark_stream_messages_as_read(user_profile, client, stream)
    elif event["type"] == 'mark_stream_messages_as_read_for_everyone':
        # This event is generated by the stream deactivation code path.
        batch_size = 100
        offset = 0
        while True:
            messages = Message.objects.filter(recipient_id=event["stream_recipient_id"]) \
                .order_by("id")[offset:offset + batch_size]
            UserMessage.objects.filter(message__in=messages).extra(where=[UserMessage.where_unread()]) \
                .update(flags=F('flags').bitor(UserMessage.flags.read))
            offset += len(messages)
            if len(messages) < batch_size:
                break
    elif event['type'] == 'clear_push_device_tokens':
        try:
            clear_push_device_tokens(event["user_profile_id"])
        except PushNotificationBouncerRetryLaterError:
            def failure_processor(event: Dict[str, Any]) -> None:
                logger.warning(
                    "Maximum retries exceeded for trigger:%s event:clear_push_device_tokens",
                    event['user_profile_id'])
            retry_event(self.queue_name, event, failure_processor)
    elif event['type'] == 'realm_export':
        start = time.time()
        realm = Realm.objects.get(id=event['realm_id'])
        output_dir = tempfile.mkdtemp(prefix="zulip-export-")
        export_event = RealmAuditLog.objects.get(id=event['id'])
        user_profile = get_user_profile_by_id(event['user_profile_id'])

        try:
            public_url = export_realm_wrapper(realm=realm, output_dir=output_dir,
                                              threads=6, upload=True, public_only=True,
                                              delete_after_upload=True)
        except Exception:
            export_event.extra_data = orjson.dumps(dict(
                failed_timestamp=timezone_now().timestamp(),
            )).decode()
            export_event.save(update_fields=['extra_data'])
            logging.error(
                "Data export for %s failed after %s",
                user_profile.realm.string_id,
                time.time() - start,
            )
            notify_realm_export(user_profile)
            return

        assert public_url is not None

        # Update the extra_data field now that the export is complete.
        export_event.extra_data = orjson.dumps(dict(
            export_path=urllib.parse.urlparse(public_url).path,
        )).decode()
        export_event.save(update_fields=['extra_data'])

        # Send a private message notification letting the user who
        # triggered the export know the export finished.
        with override_language(user_profile.default_language):
            content = _("Your data export is complete and has been uploaded here:\n\n{public_url}").format(public_url=public_url)
        internal_send_private_message(
            realm=user_profile.realm,
            sender=get_system_bot(settings.NOTIFICATION_BOT),
            recipient_user=user_profile,
            content=content,
        )

        # For future frontend use, also notify administrator
        # clients that the export happened.
        notify_realm_export(user_profile)
        logging.info(
            "Completed data export for %s in %s",
            user_profile.realm.string_id,
            time.time() - start,
        )
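
# Illustrative only: the two shapes the RealmAuditLog.extra_data string can
# take after the realm_export branch above runs, one per outcome.  The path
# and timestamp values below are made up; orjson emits compact JSON, hence
# the .decode() to store it as text.
import orjson

orjson.dumps(dict(export_path="/exports/20-abc/zulip-export.tar.gz")).decode()
# success branch -> '{"export_path":"/exports/20-abc/zulip-export.tar.gz"}'
orjson.dumps(dict(failed_timestamp=1600000000.0)).decode()
# failure branch -> '{"failed_timestamp":1600000000.0}'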