def store_user_message_to_insert(message: Message) -> None:
    """Queue a UserMessage row (flags=0) for bulk insertion, skipping
    messages we have already queued (tracked in seen_message_ids).
    """
    msg_id = message["id"]
    if msg_id in seen_message_ids:
        return
    seen_message_ids.add(msg_id)
    user_messages_to_insert.append(
        UserMessage(user_profile=user_profile, message_id=msg_id, flags=0)
    )
def get_raw_unread_data(user_profile: UserProfile) -> RawUnreadMessagesResult:
    """Fetch this user's unread-message rows (newest first, capped) and
    hand them, oldest first, to the extraction helper.
    """
    excluded_recipient_ids = get_inactive_recipient_ids(user_profile)

    user_msgs = (
        UserMessage.objects.filter(user_profile=user_profile)
        .exclude(message__recipient_id__in=excluded_recipient_ids)
        .extra(where=[UserMessage.where_unread()])
        .values(
            "message_id",
            "message__sender_id",
            MESSAGE__TOPIC,
            "message__recipient_id",
            "message__recipient__type",
            "message__recipient__type_id",
            "flags",
        )
        .order_by("-message_id")
    )

    # Limit unread messages for performance reasons.
    newest_first = list(user_msgs[:MAX_UNREAD_MESSAGES])

    # Re-reverse so downstream processing sees ascending message IDs.
    rows = list(reversed(newest_first))

    return extract_unread_data_from_um_rows(rows, user_profile)
def get_mobile_push_notification_ids(self, user_profile: UserProfile) -> List[int]:
    """Return, in ascending order, the message IDs for which this user
    has the active mobile-push-notification flag set.
    """
    active_notifications = UserMessage.objects.filter(
        user_profile=user_profile,
    ).extra(
        where=[UserMessage.where_active_push_notification()],
    )
    id_rows = active_notifications.order_by("message_id").values_list(
        "message_id", flat=True
    )
    return list(id_rows)
def do_clear_mobile_push_notifications_for_ids(
    user_profile_ids: List[int], message_ids: List[int]
) -> None:
    """Ask the mobile-notifications queue worker to remove push
    notifications for the given (user, message) combinations.

    Only UserMessage rows whose active-push-notification flag is set are
    considered; one "remove" queue event is published per affected user.
    """
    if len(message_ids) == 0:
        return

    # This function supports clearing notifications for several users
    # only for the message-edit use case where we'll have a single message_id.
    assert len(user_profile_ids) == 1 or len(message_ids) == 1

    messages_by_user = defaultdict(list)
    notifications_to_update = list(
        UserMessage.objects.filter(
            message_id__in=message_ids,
            user_profile_id__in=user_profile_ids,
        )
        .extra(
            where=[UserMessage.where_active_push_notification()],
        )
        .values_list("user_profile_id", "message_id")
    )

    # Group the affected message IDs by user so we publish one event per user.
    for (user_id, message_id) in notifications_to_update:
        messages_by_user[user_id].append(message_id)

    for (user_profile_id, event_message_ids) in messages_by_user.items():
        # The queue worker performs the actual removal on the devices.
        queue_json_publish(
            "missedmessage_mobile_notifications",
            {
                "type": "remove",
                "user_profile_id": user_profile_id,
                "message_ids": event_message_ids,
            },
        )
def do_mark_all_as_read(user_profile: UserProfile) -> int: log_statsd_event("bankruptcy") # First, we clear mobile push notifications. This is safer in the # event that the below logic times out and we're killed. all_push_message_ids = ( UserMessage.objects.filter( user_profile=user_profile, ) .extra( where=[UserMessage.where_active_push_notification()], ) .values_list("message_id", flat=True)[0:10000] ) do_clear_mobile_push_notifications_for_ids([user_profile.id], all_push_message_ids) msgs = UserMessage.objects.filter(user_profile=user_profile).extra( where=[UserMessage.where_unread()], ) count = msgs.update( flags=F("flags").bitor(UserMessage.flags.read), ) event = asdict( ReadMessagesEvent( messages=[], # we don't send messages, since the client reloads anyway all=True, ) ) event_time = timezone_now() send_event(user_profile.realm, event, [user_profile.id]) do_increment_logging_stat( user_profile, COUNT_STATS["messages_read::hour"], None, event_time, increment=count ) do_increment_logging_stat( user_profile, COUNT_STATS["messages_read_interactions::hour"], None, event_time, increment=min(1, count), ) return count
def get_starred_message_ids(user_profile: UserProfile) -> List[int]:
    """Return up to 10000 of the user's starred message IDs, oldest first."""
    starred = UserMessage.objects.filter(
        user_profile=user_profile,
    ).extra(
        where=[UserMessage.where_starred()],
    )
    ordered_ids = starred.order_by('message_id').values_list('message_id', flat=True)
    return list(ordered_ids[0:10000])
def get_starred_message_ids(user_profile: UserProfile) -> List[int]:
    """Fetch this user's starred message IDs (ascending, capped at 10000)."""
    query = (
        UserMessage.objects.filter(user_profile=user_profile)
        .extra(where=[UserMessage.where_starred()])
        .order_by('message_id')
        .values_list('message_id', flat=True)
    )
    return list(query[:10000])
def add_new_user_history(user_profile: UserProfile, streams: Iterable[Stream]) -> None:
    """Give you the last ONBOARDING_TOTAL_MESSAGES messages on your public
    streams, so you have something to look at in your home view once
    you finish the tutorial.

    The most recent ONBOARDING_UNREAD_MESSAGES are marked unread.
    """
    one_week_ago = timezone_now() - ONBOARDING_RECENT_TIMEDELTA

    # Invite-only (private) streams are skipped.
    recipient_ids = [stream.recipient_id for stream in streams if not stream.invite_only]
    recent_messages = Message.objects.filter(
        recipient_id__in=recipient_ids, date_sent__gt=one_week_ago
    ).order_by("-id")
    # Take the newest ONBOARDING_TOTAL_MESSAGES IDs, then reverse to ascending order.
    message_ids_to_use = list(
        reversed(recent_messages.values_list("id", flat=True)[0:ONBOARDING_TOTAL_MESSAGES])
    )
    if len(message_ids_to_use) == 0:
        return

    # Handle the race condition where a message arrives between
    # bulk_add_subscriptions above and the Message query just above
    already_ids = set(
        UserMessage.objects.filter(
            message_id__in=message_ids_to_use, user_profile=user_profile
        ).values_list("message_id", flat=True)
    )

    # Mark the newest ONBOARDING_UNREAD_MESSAGES as unread.
    marked_unread = 0
    ums_to_create = []
    # Iterate newest-first so the unread budget applies to the most recent messages.
    for message_id in reversed(message_ids_to_use):
        if message_id in already_ids:
            continue

        um = UserMessage(user_profile=user_profile, message_id=message_id)
        if marked_unread < ONBOARDING_UNREAD_MESSAGES:
            marked_unread += 1
        else:
            # Older messages beyond the unread budget are created already read.
            um.flags = UserMessage.flags.read
        ums_to_create.append(um)

    # ums_to_create was built newest-first; reverse so rows insert in ID order.
    UserMessage.objects.bulk_create(reversed(ums_to_create))
def get_apns_badge_count(
        user_profile: UserProfile, read_messages_ids: Optional[Sequence[int]] = None) -> int:
    """Return the number of messages with an active mobile push
    notification, used as the APNs badge count.

    read_messages_ids: message IDs the user just marked as read.  They
    still carry the active-notification flag (it's cleared only after
    the update has been sent to the devices), so we must exclude them
    from the count explicitly.
    """
    # Use None as the default instead of a mutable default argument
    # (`= []`), a classic Python pitfall; behavior is unchanged.
    if read_messages_ids is None:
        read_messages_ids = []
    return UserMessage.objects.filter(user_profile=user_profile).extra(
        where=[UserMessage.where_active_push_notification()]
    ).exclude(
        # If we've just marked some messages as read, they're still
        # marked as having active notifications; we'll clear that flag
        # only after we've sent that update to the devices. So we need
        # to exclude them explicitly from the count.
        message_id__in=read_messages_ids).count()
def get_apns_badge_count_future(
        user_profile: UserProfile, read_messages_ids: Optional[Sequence[int]] = None) -> int:
    """Future implementation of get_apns_badge_count; unused but we
    expect to use this once we resolve client-side bugs.

    read_messages_ids: message IDs just marked as read; excluded from
    the count because their active-notification flag is cleared only
    after the device update is sent.
    """
    # Use None as the default instead of a mutable default argument
    # (`= []`), a classic Python pitfall; behavior is unchanged.
    if read_messages_ids is None:
        read_messages_ids = []
    return (UserMessage.objects.filter(user_profile=user_profile).extra(
        where=[UserMessage.where_active_push_notification()]
    ).exclude(
        # If we've just marked some messages as read, they're still
        # marked as having active notifications; we'll clear that flag
        # only after we've sent that update to the devices. So we need
        # to exclude them explicitly from the count.
        message_id__in=read_messages_ids).count())
def do_mark_stream_messages_as_read(
    user_profile: UserProfile, stream_recipient_id: int, topic_name: Optional[str] = None
) -> int:
    """Mark the user's unread messages in a stream (optionally limited
    to one topic) as read.

    Returns the number of UserMessage rows updated.
    """
    log_statsd_event("mark_stream_as_read")

    msgs = UserMessage.objects.filter(
        user_profile=user_profile,
    )

    msgs = msgs.filter(message__recipient_id=stream_recipient_id)

    if topic_name:
        msgs = filter_by_topic_name_via_message(
            query=msgs,
            topic_name=topic_name,
        )

    msgs = msgs.extra(
        where=[UserMessage.where_unread()],
    )

    # Snapshot the IDs before the UPDATE below changes which rows match the filter.
    message_ids = list(msgs.values_list("message_id", flat=True))

    count = msgs.update(
        flags=F("flags").bitor(UserMessage.flags.read),
    )

    event = asdict(
        ReadMessagesEvent(
            messages=message_ids,
            all=False,
        )
    )
    event_time = timezone_now()

    send_event(user_profile.realm, event, [user_profile.id])
    do_clear_mobile_push_notifications_for_ids([user_profile.id], message_ids)

    do_increment_logging_stat(
        user_profile, COUNT_STATS["messages_read::hour"], None, event_time, increment=count
    )
    do_increment_logging_stat(
        user_profile,
        COUNT_STATS["messages_read_interactions::hour"],
        None,
        event_time,
        # At most one "interaction" is counted per call.
        increment=min(1, count),
    )
    return count
def get_unread_messages(user_profile):
    # type: (UserProfile) -> List[Dict[str, Any]]
    """Return the user's unread stream messages as a list of dicts with
    message_id, topic, and stream_id keys, in ascending message order.
    """
    unread_rows = UserMessage.objects.filter(
        user_profile=user_profile,
        message__recipient__type=Recipient.STREAM,
    ).extra(
        where=[UserMessage.where_unread()],
    ).values(
        'message_id',
        'message__subject',
        'message__recipient__type_id',
    ).order_by("message_id")

    return [
        dict(
            message_id=row['message_id'],
            topic=row['message__subject'],
            stream_id=row['message__recipient__type_id'],
        )
        for row in list(unread_rows)
    ]
def get_unread_messages(user_profile):
    # type: (UserProfile) -> List[Dict[str, Any]]
    """List the user's unread stream messages, oldest first, as dicts
    keyed by message_id, topic, and stream_id.
    """
    query = UserMessage.objects.filter(
        user_profile=user_profile,
        message__recipient__type=Recipient.STREAM,
    ).extra(
        where=[UserMessage.where_unread()],
    ).values(
        'message_id',
        'message__subject',
        'message__recipient__type_id',
    ).order_by("message_id")

    rows = list(query)
    return [
        {
            'message_id': row['message_id'],
            'topic': row['message__subject'],
            'stream_id': row['message__recipient__type_id'],
        }
        for row in rows
    ]
def do_mark_muted_user_messages_as_read(
    user_profile: UserProfile,
    muted_user: UserProfile,
) -> int:
    """Mark all of user_profile's unread messages that were sent by
    muted_user as read.

    Returns the number of messages updated (0 if there were none).
    """
    messages = UserMessage.objects.filter(
        user_profile=user_profile, message__sender=muted_user
    ).extra(where=[UserMessage.where_unread()])

    # Snapshot the IDs before the UPDATE below changes which rows match.
    message_ids = list(messages.values_list("message_id", flat=True))

    if len(message_ids) == 0:
        return 0

    count = messages.update(flags=F("flags").bitor(UserMessage.flags.read), )

    event = asdict(ReadMessagesEvent(
        messages=message_ids,
        all=False,
    ))
    event_time = timezone_now()

    send_event(user_profile.realm, event, [user_profile.id])
    do_clear_mobile_push_notifications_for_ids([user_profile.id], message_ids)

    do_increment_logging_stat(user_profile, COUNT_STATS["messages_read::hour"],
                              None, event_time, increment=count)
    do_increment_logging_stat(
        user_profile,
        COUNT_STATS["messages_read_interactions::hour"],
        None,
        event_time,
        # At most one "interaction" is counted per call.
        increment=min(1, count),
    )
    return count
def get_raw_unread_data(user_profile: UserProfile) -> RawUnreadMessagesResult:
    """Build the raw unread_msgs data structure for a user: per-message
    dicts for PMs, streams, and huddles, plus mention IDs and
    stream/topic mute metadata.
    """
    excluded_recipient_ids = get_inactive_recipient_ids(user_profile)

    user_msgs = UserMessage.objects.filter(
        user_profile=user_profile
    ).exclude(
        message__recipient_id__in=excluded_recipient_ids
    ).extra(
        where=[UserMessage.where_unread()]
    ).values(
        'message_id',
        'message__sender_id',
        MESSAGE__TOPIC,
        'message__recipient_id',
        'message__recipient__type',
        'message__recipient__type_id',
        'flags',
    ).order_by("-message_id")

    # Limit unread messages for performance reasons.
    user_msgs = list(user_msgs[:MAX_UNREAD_MESSAGES])

    # Query is newest-first; re-reverse so we process ascending message IDs.
    rows = list(reversed(user_msgs))

    muted_stream_ids = get_muted_stream_ids(user_profile)

    topic_mute_checker = build_topic_mute_checker(user_profile)

    def is_row_muted(stream_id: int, recipient_id: int, topic: str) -> bool:
        # Muted if either the whole stream or this specific topic is muted.
        if stream_id in muted_stream_ids:
            return True

        if topic_mute_checker(recipient_id, topic):
            return True

        return False

    huddle_cache = {}  # type: Dict[int, str]

    def get_huddle_users(recipient_id: int) -> str:
        # Memoize the huddle membership lookup per recipient.
        if recipient_id in huddle_cache:
            return huddle_cache[recipient_id]

        user_ids_string = huddle_users(recipient_id)
        huddle_cache[recipient_id] = user_ids_string
        return user_ids_string

    pm_dict = {}
    stream_dict = {}
    unmuted_stream_msgs = set()
    huddle_dict = {}
    mentions = set()

    for row in rows:
        message_id = row['message_id']
        msg_type = row['message__recipient__type']
        recipient_id = row['message__recipient_id']
        sender_id = row['message__sender_id']

        if msg_type == Recipient.STREAM:
            stream_id = row['message__recipient__type_id']
            topic = row[MESSAGE__TOPIC]
            stream_dict[message_id] = dict(
                stream_id=stream_id,
                topic=topic,
                sender_id=sender_id,
            )
            if not is_row_muted(stream_id, recipient_id, topic):
                unmuted_stream_msgs.add(message_id)

        elif msg_type == Recipient.PERSONAL:
            pm_dict[message_id] = dict(
                sender_id=sender_id,
            )

        elif msg_type == Recipient.HUDDLE:
            user_ids_string = get_huddle_users(recipient_id)
            huddle_dict[message_id] = dict(
                user_ids_string=user_ids_string,
            )

        is_mentioned = (row['flags'] & UserMessage.flags.mentioned) != 0
        if is_mentioned:
            mentions.add(message_id)

    return dict(
        pm_dict=pm_dict,
        stream_dict=stream_dict,
        muted_stream_ids=muted_stream_ids,
        unmuted_stream_msgs=unmuted_stream_msgs,
        huddle_dict=huddle_dict,
        mentions=mentions,
    )
def consume(self, event: Dict[str, Any]) -> None:
    """Process one deferred_work queue event, dispatching on event["type"]."""
    start = time.time()
    if event["type"] == "mark_stream_messages_as_read":
        user_profile = get_user_profile_by_id(event["user_profile_id"])

        for recipient_id in event["stream_recipient_ids"]:
            count = do_mark_stream_messages_as_read(user_profile, recipient_id)
            logger.info(
                "Marked %s messages as read for user %s, stream_recipient_id %s",
                count,
                user_profile.id,
                recipient_id,
            )
    elif event["type"] == "mark_stream_messages_as_read_for_everyone":
        # This event is generated by the stream deactivation code path.
        batch_size = 100
        offset = 0
        # Process the stream's messages in ID-ordered batches to bound memory use.
        while True:
            messages = Message.objects.filter(
                recipient_id=event["stream_recipient_id"]
            ).order_by("id")[offset:offset + batch_size]
            UserMessage.objects.filter(message__in=messages).extra(
                where=[UserMessage.where_unread()]
            ).update(flags=F("flags").bitor(UserMessage.flags.read))
            offset += len(messages)
            if len(messages) < batch_size:
                break
        logger.info(
            "Marked %s messages as read for all users, stream_recipient_id %s",
            offset,
            event["stream_recipient_id"],
        )
    elif event["type"] == "clear_push_device_tokens":
        try:
            clear_push_device_tokens(event["user_profile_id"])
        except PushNotificationBouncerRetryLaterError:

            def failure_processor(event: Dict[str, Any]) -> None:
                logger.warning(
                    "Maximum retries exceeded for trigger:%s event:clear_push_device_tokens",
                    event["user_profile_id"],
                )

            retry_event(self.queue_name, event, failure_processor)
    elif event["type"] == "realm_export":
        realm = Realm.objects.get(id=event["realm_id"])
        output_dir = tempfile.mkdtemp(prefix="zulip-export-")
        export_event = RealmAuditLog.objects.get(id=event["id"])
        user_profile = get_user_profile_by_id(event["user_profile_id"])

        try:
            public_url = export_realm_wrapper(
                realm=realm,
                output_dir=output_dir,
                threads=6,
                upload=True,
                public_only=True,
                delete_after_upload=True,
            )
        except Exception:
            # Record the failure timestamp on the audit-log row, then notify.
            export_event.extra_data = orjson.dumps(
                dict(failed_timestamp=timezone_now().timestamp(), )).decode()
            export_event.save(update_fields=["extra_data"])
            logging.error(
                "Data export for %s failed after %s",
                user_profile.realm.string_id,
                time.time() - start,
            )
            notify_realm_export(user_profile)
            return

        assert public_url is not None

        # Update the extra_data field now that the export is complete.
        export_event.extra_data = orjson.dumps(
            dict(export_path=urllib.parse.urlparse(public_url).path, )).decode()
        export_event.save(update_fields=["extra_data"])

        # Send a private message notification letting the user who
        # triggered the export know the export finished.
        with override_language(user_profile.default_language):
            content = _(
                "Your data export is complete and has been uploaded here:\n\n{public_url}"
            ).format(public_url=public_url)
        internal_send_private_message(
            sender=get_system_bot(settings.NOTIFICATION_BOT, realm.id),
            recipient_user=user_profile,
            content=content,
        )

        # For future frontend use, also notify administrator
        # clients that the export happened.
        notify_realm_export(user_profile)
        logging.info(
            "Completed data export for %s in %s",
            user_profile.realm.string_id,
            time.time() - start,
        )

    end = time.time()
    logger.info("deferred_work processed %s event (%dms)", event["type"], (end - start) * 1000)
def consume(self, event: Dict[str, Any]) -> None:
    """Process one deferred_work queue event, dispatching on event['type']."""
    if event['type'] == 'mark_stream_messages_as_read':
        user_profile = get_user_profile_by_id(event['user_profile_id'])
        client = Client.objects.get(id=event['client_id'])

        for stream_id in event['stream_ids']:
            # Since the user just unsubscribed, we don't require
            # an active Subscription object (otherwise, private
            # streams would never be accessible)
            (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id,
                                                           require_active=False)
            do_mark_stream_messages_as_read(user_profile, client, stream)
    elif event["type"] == 'mark_stream_messages_as_read_for_everyone':
        # This event is generated by the stream deactivation code path.
        batch_size = 100
        offset = 0
        # Process the stream's messages in ID-ordered batches to bound memory use.
        while True:
            messages = Message.objects.filter(recipient_id=event["stream_recipient_id"]) \
                .order_by("id")[offset:offset + batch_size]
            UserMessage.objects.filter(message__in=messages).extra(where=[UserMessage.where_unread()]) \
                .update(flags=F('flags').bitor(UserMessage.flags.read))
            offset += len(messages)
            if len(messages) < batch_size:
                break
    elif event['type'] == 'clear_push_device_tokens':
        try:
            clear_push_device_tokens(event["user_profile_id"])
        except PushNotificationBouncerRetryLaterError:
            def failure_processor(event: Dict[str, Any]) -> None:
                logger.warning(
                    "Maximum retries exceeded for trigger:%s event:clear_push_device_tokens",
                    event['user_profile_id'])
            retry_event(self.queue_name, event, failure_processor)
    elif event['type'] == 'realm_export':
        start = time.time()
        realm = Realm.objects.get(id=event['realm_id'])
        output_dir = tempfile.mkdtemp(prefix="zulip-export-")
        export_event = RealmAuditLog.objects.get(id=event['id'])
        user_profile = get_user_profile_by_id(event['user_profile_id'])

        try:
            public_url = export_realm_wrapper(realm=realm, output_dir=output_dir,
                                              threads=6, upload=True, public_only=True,
                                              delete_after_upload=True)
        except Exception:
            # Record the failure timestamp on the audit-log row, then notify.
            export_event.extra_data = orjson.dumps(dict(
                failed_timestamp=timezone_now().timestamp(),
            )).decode()
            export_event.save(update_fields=['extra_data'])
            logging.error(
                "Data export for %s failed after %s",
                user_profile.realm.string_id,
                time.time() - start,
            )
            notify_realm_export(user_profile)
            return

        assert public_url is not None

        # Update the extra_data field now that the export is complete.
        export_event.extra_data = orjson.dumps(dict(
            export_path=urllib.parse.urlparse(public_url).path,
        )).decode()
        export_event.save(update_fields=['extra_data'])

        # Send a private message notification letting the user who
        # triggered the export know the export finished.
        with override_language(user_profile.default_language):
            content = _("Your data export is complete and has been uploaded here:\n\n{public_url}").format(public_url=public_url)
        internal_send_private_message(
            realm=user_profile.realm,
            sender=get_system_bot(settings.NOTIFICATION_BOT),
            recipient_user=user_profile,
            content=content,
        )

        # For future frontend use, also notify administrator
        # clients that the export happened.
        notify_realm_export(user_profile)
        logging.info(
            "Completed data export for %s in %s",
            user_profile.realm.string_id,
            time.time() - start,
        )
def do_update_message_flags(user_profile: UserProfile, operation: str, flag: str,
                            messages: List[int]) -> Tuple[int, List[int]]:
    """Add or remove `flag` on the given message IDs for `user_profile`.

    Returns (number of rows updated, the message IDs actually changed).
    Raises JsonableError for invalid or non-editable flags and for
    operations other than "add"/"remove".
    """
    valid_flags = [item for item in UserMessage.flags if item not in UserMessage.NON_API_FLAGS]
    if flag not in valid_flags:
        raise JsonableError(_("Invalid flag: '{}'").format(flag))
    if flag in UserMessage.NON_EDITABLE_FLAGS:
        raise JsonableError(_("Flag not editable: '{}'").format(flag))
    if operation not in ("add", "remove"):
        raise JsonableError(_("Invalid message flag operation: '{}'").format(operation))

    flagattr = getattr(UserMessage.flags, flag)

    msgs = UserMessage.objects.filter(user_profile=user_profile, message_id__in=messages)
    um_message_ids = {um.message_id for um in msgs}
    historical_message_ids = list(set(messages) - um_message_ids)

    # Users can mutate flags for messages that don't have a UserMessage yet.
    # First, validate that the user is even allowed to access these message_ids.
    for message_id in historical_message_ids:
        access_message(user_profile, message_id)

    # And then create historical UserMessage records.  See the called function for more context.
    create_historical_user_messages(user_id=user_profile.id, message_ids=historical_message_ids)

    with transaction.atomic():
        if operation == "add":
            # Lock and restrict to rows where the flag is currently absent,
            # so `updated_message_ids` reflects only actual changes.
            msgs = (msgs.select_for_update().order_by("message_id").extra(
                where=[UserMessage.where_flag_is_absent(flagattr)]))
            updated_message_ids = [um.message_id for um in msgs]
            msgs.filter(message_id__in=updated_message_ids).update(
                flags=F("flags").bitor(flagattr))
        elif operation == "remove":
            # Symmetric to "add": only rows where the flag is present.
            msgs = (msgs.select_for_update().order_by("message_id").extra(
                where=[UserMessage.where_flag_is_present(flagattr)]))
            updated_message_ids = [um.message_id for um in msgs]
            msgs.filter(message_id__in=updated_message_ids).update(
                flags=F("flags").bitand(~flagattr))

    count = len(updated_message_ids)
    event = {
        "type": "update_message_flags",
        "op": operation,
        # NOTE(review): both "op" and "operation" are sent — presumably
        # for client compatibility; confirm before removing either.
        "operation": operation,
        "flag": flag,
        "messages": updated_message_ids,
        "all": False,
    }

    if flag == "read" and operation == "remove":
        # When removing the read flag (i.e. marking messages as
        # unread), extend the event with an additional object with
        # details on the messages required to update the client's
        # `unread_msgs` data structure.
        raw_unread_data = get_raw_unread_data(user_profile, updated_message_ids)
        event["message_details"] = format_unread_message_details(user_profile.id, raw_unread_data)

    send_event(user_profile.realm, event, [user_profile.id])

    if flag == "read" and operation == "add":
        event_time = timezone_now()
        do_clear_mobile_push_notifications_for_ids([user_profile.id], updated_message_ids)

        do_increment_logging_stat(user_profile, COUNT_STATS["messages_read::hour"],
                                  None, event_time, increment=count)
        do_increment_logging_stat(
            user_profile,
            COUNT_STATS["messages_read_interactions::hour"],
            None,
            event_time,
            # At most one "interaction" is counted per call.
            increment=min(1, count),
        )

    return count, updated_message_ids
def get_unread_message_ids_per_recipient(user_profile):
    # type: (UserProfile) -> Dict[str, Any]
    """Aggregate the user's unread message IDs by conversation:
    PMs by sender, streams by (stream, topic), huddles by recipient;
    also returns mentioned-message IDs and the total count.
    """
    excluded_recipient_ids = get_inactive_recipient_ids(user_profile)

    user_msgs = UserMessage.objects.filter(
        user_profile=user_profile
    ).exclude(
        message__recipient_id__in=excluded_recipient_ids
    ).extra(
        where=[UserMessage.where_unread()]
    ).values(
        'message_id',
        'message__sender_id',
        'message__subject',
        'message__recipient_id',
        'message__recipient__type',
        'message__recipient__type_id',
        'flags',
    ).order_by("-message_id")

    # Limit unread messages for performance reasons.
    user_msgs = list(user_msgs[:MAX_UNREAD_MESSAGES])

    # Query is newest-first; re-reverse so IDs are in ascending order.
    rows = list(reversed(user_msgs))
    count = len(rows)

    pm_msgs = [
        dict(
            sender_id=row['message__sender_id'],
            message_id=row['message_id'],
        ) for row in rows
        if row['message__recipient__type'] == Recipient.PERSONAL]

    pm_objects = aggregate_dict(
        input_rows=pm_msgs,
        lookup_fields=[
            'sender_id',
        ],
        input_field='message_id',
        output_field='unread_message_ids',
    )

    stream_msgs = [
        dict(
            stream_id=row['message__recipient__type_id'],
            topic=row['message__subject'],
            message_id=row['message_id'],
        ) for row in rows
        if row['message__recipient__type'] == Recipient.STREAM]

    stream_objects = aggregate_dict(
        input_rows=stream_msgs,
        lookup_fields=[
            'stream_id',
            'topic',
        ],
        input_field='message_id',
        output_field='unread_message_ids',
    )

    huddle_msgs = [
        dict(
            recipient_id=row['message__recipient_id'],
            message_id=row['message_id'],
        ) for row in rows
        if row['message__recipient__type'] == Recipient.HUDDLE]

    huddle_objects = aggregate_dict(
        input_rows=huddle_msgs,
        lookup_fields=[
            'recipient_id',
        ],
        input_field='message_id',
        output_field='unread_message_ids',
    )

    # Replace the internal recipient_id with the huddle's user-IDs string.
    for huddle in huddle_objects:
        huddle['user_ids_string'] = huddle_users(huddle['recipient_id'])
        del huddle['recipient_id']

    mentioned_message_ids = [
        row['message_id']
        for row in rows
        if (row['flags'] & UserMessage.flags.mentioned) != 0]

    result = dict(
        pms=pm_objects,
        streams=stream_objects,
        huddles=huddle_objects,
        mentions=mentioned_message_ids,
        count=count,
    )

    return result
def store_user_message_to_insert(message):
    # type: (Message) -> None
    """Queue a UserMessage row (flags=0) for later bulk insertion."""
    # Bind the new row to a distinct name instead of rebinding the
    # `message` parameter, which shadowed the Message being processed.
    user_message = UserMessage(user_profile=user_profile,
                               message_id=message['id'], flags=0)
    user_messages_to_insert.append(user_message)
def flags_list(self) -> List[str]:
    """Decode this row's flags bitmask into a list of flag names."""
    raw_flags = self.flags
    return UserMessage.flags_list_for_flags(raw_flags)
def get_raw_unread_data(user_profile: UserProfile) -> RawUnreadMessagesResult:
    """Build the raw unread_msgs data structure for a user: per-message
    dicts for PMs, streams, and huddles, plus mention IDs (including
    unmuted wildcard mentions) and stream/topic mute metadata.
    """
    excluded_recipient_ids = get_inactive_recipient_ids(user_profile)

    user_msgs = UserMessage.objects.filter(user_profile=user_profile).exclude(
        message__recipient_id__in=excluded_recipient_ids).extra(
        where=[UserMessage.where_unread()]).values(
        'message_id',
        'message__sender_id',
        MESSAGE__TOPIC,
        'message__recipient_id',
        'message__recipient__type',
        'message__recipient__type_id',
        'flags',
    ).order_by("-message_id")

    # Limit unread messages for performance reasons.
    user_msgs = list(user_msgs[:MAX_UNREAD_MESSAGES])

    # Query is newest-first; re-reverse so we process ascending message IDs.
    rows = list(reversed(user_msgs))

    muted_stream_ids = get_muted_stream_ids(user_profile)

    topic_mute_checker = build_topic_mute_checker(user_profile)

    def is_row_muted(stream_id: int, recipient_id: int, topic: str) -> bool:
        # Muted if either the whole stream or this specific topic is muted.
        if stream_id in muted_stream_ids:
            return True

        if topic_mute_checker(recipient_id, topic):
            return True

        return False

    huddle_cache = {}  # type: Dict[int, str]

    def get_huddle_users(recipient_id: int) -> str:
        # Memoize the huddle membership lookup per recipient.
        if recipient_id in huddle_cache:
            return huddle_cache[recipient_id]

        user_ids_string = huddle_users(recipient_id)
        huddle_cache[recipient_id] = user_ids_string
        return user_ids_string

    pm_dict = {}
    stream_dict = {}
    unmuted_stream_msgs = set()
    huddle_dict = {}
    mentions = set()

    for row in rows:
        message_id = row['message_id']
        msg_type = row['message__recipient__type']
        recipient_id = row['message__recipient_id']
        sender_id = row['message__sender_id']

        if msg_type == Recipient.STREAM:
            stream_id = row['message__recipient__type_id']
            topic = row[MESSAGE__TOPIC]
            stream_dict[message_id] = dict(
                stream_id=stream_id,
                topic=topic,
                sender_id=sender_id,
            )
            if not is_row_muted(stream_id, recipient_id, topic):
                unmuted_stream_msgs.add(message_id)

        elif msg_type == Recipient.PERSONAL:
            if sender_id == user_profile.id:
                other_user_id = row['message__recipient__type_id']
            else:
                other_user_id = sender_id

            # The `sender_id` field here is misnamed.  It's really
            # just the other participant in a PM conversation.  For
            # most unread PM messages, the other user is also the sender,
            # but that's not true for certain messages sent from the
            # API.  Unfortunately, it's difficult now to rename the
            # field without breaking mobile.
            pm_dict[message_id] = dict(sender_id=other_user_id, )

        elif msg_type == Recipient.HUDDLE:
            user_ids_string = get_huddle_users(recipient_id)
            huddle_dict[message_id] = dict(user_ids_string=user_ids_string, )

        # TODO: Add support for alert words here as well.
        is_mentioned = (row['flags'] & UserMessage.flags.mentioned) != 0
        is_wildcard_mentioned = (row['flags'] & UserMessage.flags.wildcard_mentioned) != 0
        if is_mentioned:
            mentions.add(message_id)
        if is_wildcard_mentioned:
            if msg_type == Recipient.STREAM:
                # Wildcard mentions only count in unmuted stream/topic contexts.
                stream_id = row['message__recipient__type_id']
                topic = row[MESSAGE__TOPIC]
                if not is_row_muted(stream_id, recipient_id, topic):
                    mentions.add(message_id)
            else:  # nocoverage # TODO: Test wildcard mentions in PMs.
                mentions.add(message_id)

    return dict(
        pm_dict=pm_dict,
        stream_dict=stream_dict,
        muted_stream_ids=muted_stream_ids,
        unmuted_stream_msgs=unmuted_stream_msgs,
        huddle_dict=huddle_dict,
        mentions=mentions,
    )
def store_user_message_to_insert(message: Message) -> None:
    """Queue a UserMessage row (flags=0) for later bulk insertion."""
    # Bind the new row to a distinct name instead of rebinding the
    # `message` parameter, which shadowed the Message being processed.
    user_message = UserMessage(user_profile=user_profile,
                               message_id=message["id"], flags=0)
    user_messages_to_insert.append(user_message)
def get_unread_message_ids_per_recipient(user_profile):
    # type: (UserProfile) -> Dict[str, Any]
    """Aggregate the user's unread message IDs by conversation:
    PMs by sender, streams by (stream, topic), huddles by recipient;
    also returns mentioned-message IDs and the total count.
    """
    excluded_recipient_ids = get_inactive_recipient_ids(user_profile)

    user_msgs = UserMessage.objects.filter(user_profile=user_profile).exclude(
        message__recipient_id__in=excluded_recipient_ids).extra(
        where=[UserMessage.where_unread()]).values(
        'message_id',
        'message__sender_id',
        'message__subject',
        'message__recipient_id',
        'message__recipient__type',
        'message__recipient__type_id',
        'flags',
    ).order_by("-message_id")

    # Limit unread messages for performance reasons.
    user_msgs = list(user_msgs[:MAX_UNREAD_MESSAGES])

    # Query is newest-first; re-reverse so IDs are in ascending order.
    rows = list(reversed(user_msgs))
    count = len(rows)

    pm_msgs = [
        dict(
            sender_id=row['message__sender_id'],
            message_id=row['message_id'],
        )
        for row in rows
        if row['message__recipient__type'] == Recipient.PERSONAL
    ]

    pm_objects = aggregate_dict(
        input_rows=pm_msgs,
        lookup_fields=[
            'sender_id',
        ],
        input_field='message_id',
        output_field='unread_message_ids',
    )

    stream_msgs = [
        dict(
            stream_id=row['message__recipient__type_id'],
            topic=row['message__subject'],
            message_id=row['message_id'],
        )
        for row in rows
        if row['message__recipient__type'] == Recipient.STREAM
    ]

    stream_objects = aggregate_dict(
        input_rows=stream_msgs,
        lookup_fields=[
            'stream_id',
            'topic',
        ],
        input_field='message_id',
        output_field='unread_message_ids',
    )

    huddle_msgs = [
        dict(
            recipient_id=row['message__recipient_id'],
            message_id=row['message_id'],
        )
        for row in rows
        if row['message__recipient__type'] == Recipient.HUDDLE
    ]

    huddle_objects = aggregate_dict(
        input_rows=huddle_msgs,
        lookup_fields=[
            'recipient_id',
        ],
        input_field='message_id',
        output_field='unread_message_ids',
    )

    # Replace the internal recipient_id with the huddle's user-IDs string.
    for huddle in huddle_objects:
        huddle['user_ids_string'] = huddle_users(huddle['recipient_id'])
        del huddle['recipient_id']

    mentioned_message_ids = [
        row['message_id']
        for row in rows
        if (row['flags'] & UserMessage.flags.mentioned) != 0
    ]

    result = dict(
        pms=pm_objects,
        streams=stream_objects,
        huddles=huddle_objects,
        mentions=mentioned_message_ids,
        count=count,
    )

    return result
def get_raw_unread_data(user_profile):
    # type: (UserProfile) -> Dict[str, Any]
    """Build the raw unread_msgs data structure for a user: per-message
    dicts for PMs, streams, and huddles, plus mention IDs and
    stream/topic mute metadata.
    """
    excluded_recipient_ids = get_inactive_recipient_ids(user_profile)

    user_msgs = UserMessage.objects.filter(
        user_profile=user_profile
    ).exclude(
        message__recipient_id__in=excluded_recipient_ids
    ).extra(
        where=[UserMessage.where_unread()]
    ).values(
        'message_id',
        'message__sender_id',
        'message__subject',
        'message__recipient_id',
        'message__recipient__type',
        'message__recipient__type_id',
        'flags',
    ).order_by("-message_id")

    # Limit unread messages for performance reasons.
    user_msgs = list(user_msgs[:MAX_UNREAD_MESSAGES])

    # Query is newest-first; re-reverse so we process ascending message IDs.
    rows = list(reversed(user_msgs))

    muted_stream_ids = get_muted_stream_ids(user_profile)

    topic_mute_checker = build_topic_mute_checker(user_profile)

    def is_row_muted(stream_id, recipient_id, topic):
        # type: (int, int, Text) -> bool
        # Muted if either the whole stream or this specific topic is muted.
        if stream_id in muted_stream_ids:
            return True

        if topic_mute_checker(recipient_id, topic):
            return True

        return False

    huddle_cache = {}  # type: Dict[int, str]

    def get_huddle_users(recipient_id):
        # type: (int) -> str
        # Memoize the huddle membership lookup per recipient.
        if recipient_id in huddle_cache:
            return huddle_cache[recipient_id]

        user_ids_string = huddle_users(recipient_id)
        huddle_cache[recipient_id] = user_ids_string
        return user_ids_string

    pm_dict = {}
    stream_dict = {}
    unmuted_stream_msgs = set()
    huddle_dict = {}
    mentions = set()

    for row in rows:
        message_id = row['message_id']
        msg_type = row['message__recipient__type']
        recipient_id = row['message__recipient_id']
        sender_id = row['message__sender_id']

        if msg_type == Recipient.STREAM:
            stream_id = row['message__recipient__type_id']
            topic = row['message__subject']
            stream_dict[message_id] = dict(
                stream_id=stream_id,
                topic=topic,
                sender_id=sender_id,
            )
            if not is_row_muted(stream_id, recipient_id, topic):
                unmuted_stream_msgs.add(message_id)

        elif msg_type == Recipient.PERSONAL:
            pm_dict[message_id] = dict(
                sender_id=sender_id,
            )

        elif msg_type == Recipient.HUDDLE:
            user_ids_string = get_huddle_users(recipient_id)
            huddle_dict[message_id] = dict(
                user_ids_string=user_ids_string,
            )

        is_mentioned = (row['flags'] & UserMessage.flags.mentioned) != 0
        if is_mentioned:
            mentions.add(message_id)

    return dict(
        pm_dict=pm_dict,
        stream_dict=stream_dict,
        muted_stream_ids=muted_stream_ids,
        unmuted_stream_msgs=unmuted_stream_msgs,
        huddle_dict=huddle_dict,
        mentions=mentions,
    )
def get_raw_unread_data(user_profile: UserProfile) -> RawUnreadMessagesResult:
    """Collect the user's unread messages, bucketed by conversation type.

    Returns a dict with per-message_id entries for PMs, streams, and
    huddles, the user's muted stream ids, the unmuted stream message
    ids, and the set of message ids carrying a (relevant) mention.
    """
    excluded_recipient_ids = get_inactive_recipient_ids(user_profile)

    user_msgs = UserMessage.objects.filter(
        user_profile=user_profile
    ).exclude(
        message__recipient_id__in=excluded_recipient_ids
    ).extra(
        where=[UserMessage.where_unread()]
    ).values(
        'message_id',
        'message__sender_id',
        MESSAGE__TOPIC,
        'message__recipient_id',
        'message__recipient__type',
        'message__recipient__type_id',
        'flags',
    ).order_by("-message_id")

    # Limit unread messages for performance reasons.
    user_msgs = list(user_msgs[:MAX_UNREAD_MESSAGES])

    # The query ran newest-first so the cap keeps recent messages;
    # downstream consumers want ascending message_id order.
    rows = list(reversed(user_msgs))

    muted_stream_ids = get_muted_stream_ids(user_profile)
    topic_mute_checker = build_topic_mute_checker(user_profile)

    def is_row_muted(stream_id: int, recipient_id: int, topic: str) -> bool:
        # Muted if the whole stream is muted or this specific topic is.
        return (stream_id in muted_stream_ids
                or topic_mute_checker(recipient_id, topic))

    # Memoize huddle user-id strings; many rows share a recipient.
    huddle_cache: Dict[int, str] = {}

    def get_huddle_users(recipient_id: int) -> str:
        if recipient_id not in huddle_cache:
            huddle_cache[recipient_id] = huddle_users(recipient_id)
        return huddle_cache[recipient_id]

    pm_dict = {}
    stream_dict = {}
    unmuted_stream_msgs = set()
    huddle_dict = {}
    mentions = set()

    for row in rows:
        message_id = row['message_id']
        msg_type = row['message__recipient__type']
        recipient_id = row['message__recipient_id']
        sender_id = row['message__sender_id']

        if msg_type == Recipient.STREAM:
            stream_id = row['message__recipient__type_id']
            topic = row[MESSAGE__TOPIC]
            stream_dict[message_id] = dict(
                stream_id=stream_id,
                topic=topic,
                sender_id=sender_id,
            )
            if not is_row_muted(stream_id, recipient_id, topic):
                unmuted_stream_msgs.add(message_id)
        elif msg_type == Recipient.PERSONAL:
            pm_dict[message_id] = dict(
                sender_id=sender_id,
            )
        elif msg_type == Recipient.HUDDLE:
            huddle_dict[message_id] = dict(
                user_ids_string=get_huddle_users(recipient_id),
            )

        # TODO: Add support for alert words here as well.
        flags = row['flags']
        is_mentioned = (flags & UserMessage.flags.mentioned) != 0
        is_wildcard_mentioned = (flags & UserMessage.flags.wildcard_mentioned) != 0

        if is_mentioned:
            mentions.add(message_id)
        if is_wildcard_mentioned:
            if msg_type == Recipient.STREAM:
                # Wildcard mentions only count in unmuted stream contexts.
                stream_id = row['message__recipient__type_id']
                topic = row[MESSAGE__TOPIC]
                if not is_row_muted(stream_id, recipient_id, topic):
                    mentions.add(message_id)
            else:  # nocoverage # TODO: Test wildcard mentions in PMs.
                mentions.add(message_id)

    return dict(
        pm_dict=pm_dict,
        stream_dict=stream_dict,
        muted_stream_ids=muted_stream_ids,
        unmuted_stream_msgs=unmuted_stream_msgs,
        huddle_dict=huddle_dict,
        mentions=mentions,
    )
def user_info(um: UserMessage) -> Dict[str, Any]: return { "id": um.user_profile_id, "flags": um.flags_list(), }
def get_messages_backend(request: HttpRequest, user_profile: UserProfile,
                         anchor_val: Optional[str]=REQ(
                             'anchor', str_validator=check_string, default=None),
                         num_before: int=REQ(converter=to_non_negative_int),
                         num_after: int=REQ(converter=to_non_negative_int),
                         narrow: OptionalNarrowListT=REQ('narrow', converter=narrow_parameter,
                                                         default=None),
                         use_first_unread_anchor_val: bool=REQ(
                             'use_first_unread_anchor', validator=check_bool, default=False),
                         client_gravatar: bool=REQ(validator=check_bool, default=False),
                         apply_markdown: bool=REQ(validator=check_bool,
                                                  default=True)) -> HttpResponse:
    """Fetch a window of messages around an anchor for the given user.

    Returns a JSON response containing up to `num_before` + `num_after` + 1
    rendered messages centered on `anchor`, plus metadata from the
    post-processing step (found_anchor/found_oldest/found_newest/
    history_limited).  The query is built with SQLAlchemy; `narrow`
    optionally restricts which messages are considered.
    """
    anchor = parse_anchor_value(anchor_val, use_first_unread_anchor_val)
    if num_before + num_after > MAX_MESSAGES_PER_FETCH:
        return json_error(_("Too many messages requested (maximum {}).").format(
            MAX_MESSAGES_PER_FETCH,
        ))

    if user_profile.realm.email_address_visibility != Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE:
        # If email addresses are only available to administrators,
        # clients cannot compute gravatars, so we force-set it to false.
        client_gravatar = False

    include_history = ok_to_include_history(narrow, user_profile)
    if include_history:
        # The initial query in this case doesn't use `zerver_usermessage`,
        # and isn't yet limited to messages the user is entitled to see!
        #
        # This is OK only because we've made sure this is a narrow that
        # will cause us to limit the query appropriately later.
        # See `ok_to_include_history` for details.
        need_message = True
        need_user_message = False
    elif narrow is None:
        # We need to limit to messages the user has received, but we don't actually
        # need any fields from Message
        need_message = False
        need_user_message = True
    else:
        need_message = True
        need_user_message = True

    query, inner_msg_id_col = get_base_query_for_search(
        user_profile=user_profile,
        need_message=need_message,
        need_user_message=need_user_message,
    )

    query, is_search = add_narrow_conditions(
        user_profile=user_profile,
        inner_msg_id_col=inner_msg_id_col,
        query=query,
        narrow=narrow,
    )

    if narrow is not None:
        # Add some metadata to our logging data for narrows
        verbose_operators = []
        for term in narrow:
            if term['operator'] == "is":
                verbose_operators.append("is:" + term['operand'])
            else:
                verbose_operators.append(term['operator'])
        request._log_data['extra'] = "[{}]".format(",".join(verbose_operators))

    sa_conn = get_sqlalchemy_connection()

    if anchor is None:
        # The use_first_unread_anchor code path
        anchor = find_first_unread_anchor(
            sa_conn,
            user_profile,
            narrow,
        )

    anchored_to_left = (anchor == 0)

    # Set value that will be used to short circuit the after_query
    # altogether and avoid needless conditions in the before_query.
    anchored_to_right = (anchor >= LARGER_THAN_MAX_MESSAGE_ID)
    if anchored_to_right:
        # Nothing can exist after the maximal anchor, so skip the after side.
        num_after = 0

    first_visible_message_id = get_first_visible_message_id(user_profile.realm)
    query = limit_query_to_range(
        query=query,
        num_before=num_before,
        num_after=num_after,
        anchor=anchor,
        anchored_to_left=anchored_to_left,
        anchored_to_right=anchored_to_right,
        id_col=inner_msg_id_col,
        first_visible_message_id=first_visible_message_id,
    )

    main_query = alias(query)
    query = select(main_query.c, None, main_query).order_by(column("message_id").asc())
    # This is a hack to tag the query we use for testing
    query = query.prefix_with("/* get_messages */")
    rows = list(sa_conn.execute(query).fetchall())

    query_info = post_process_limited_query(
        rows=rows,
        num_before=num_before,
        num_after=num_after,
        anchor=anchor,
        anchored_to_left=anchored_to_left,
        anchored_to_right=anchored_to_right,
        first_visible_message_id=first_visible_message_id,
    )

    rows = query_info['rows']

    # The following is a little messy, but ensures that the code paths
    # are similar regardless of the value of include_history.  The
    # 'user_messages' dictionary maps each message to the user's
    # UserMessage object for that message, which we will attach to the
    # rendered message dict before returning it.  We attempt to
    # bulk-fetch rendered message dicts from remote cache using the
    # 'messages' list.
    message_ids: List[int] = []
    user_message_flags: Dict[int, List[str]] = {}
    if include_history:
        message_ids = [row[0] for row in rows]

        # TODO: This could be done with an outer join instead of two queries
        um_rows = UserMessage.objects.filter(user_profile=user_profile,
                                             message__id__in=message_ids)
        user_message_flags = {um.message_id: um.flags_list() for um in um_rows}

        # Messages the user never received get synthetic flags.
        for message_id in message_ids:
            if message_id not in user_message_flags:
                user_message_flags[message_id] = ["read", "historical"]
    else:
        # Non-history path: each row already carries (message_id, flags).
        for row in rows:
            message_id = row[0]
            flags = row[1]
            user_message_flags[message_id] = UserMessage.flags_list_for_flags(flags)
            message_ids.append(message_id)

    search_fields: Dict[int, Dict[str, str]] = dict()
    if is_search:
        # For full-text search, the last four columns of each row are the
        # search-specific fields appended by the search narrow.
        for row in rows:
            message_id = row[0]
            (topic_name, rendered_content, content_matches, topic_matches) = row[-4:]

            try:
                search_fields[message_id] = get_search_fields(rendered_content, topic_name,
                                                              content_matches, topic_matches)
            except UnicodeDecodeError as err:  # nocoverage
                # No coverage for this block since it should be
                # impossible, and we plan to remove it once we've
                # debugged the case that makes it happen.
                raise Exception(str(err), message_id, narrow)

    message_list = messages_for_ids(
        message_ids=message_ids,
        user_message_flags=user_message_flags,
        search_fields=search_fields,
        apply_markdown=apply_markdown,
        client_gravatar=client_gravatar,
        allow_edit_history=user_profile.realm.allow_edit_history,
    )

    statsd.incr('loaded_old_messages', len(message_list))

    ret = dict(
        messages=message_list,
        result='success',
        msg='',
        found_anchor=query_info['found_anchor'],
        found_oldest=query_info['found_oldest'],
        found_newest=query_info['found_newest'],
        history_limited=query_info['history_limited'],
        anchor=anchor,
    )
    return json_success(ret)
def get_unread_message_ids_per_recipient(user_profile): # type: (UserProfile) -> Dict[str, List[Dict[str, Any]]] user_msgs = UserMessage.objects.filter(user_profile=user_profile).extra( where=[UserMessage.where_unread()]).values( 'message_id', 'message__sender_id', 'message__subject', 'message__recipient_id', 'message__recipient__type', 'message__recipient__type_id', 'flags', ) rows = list(user_msgs) pm_msgs = [ dict( sender_id=row['message__sender_id'], message_id=row['message_id'], ) for row in rows if row['message__recipient__type'] == Recipient.PERSONAL ] pm_objects = aggregate_dict( input_rows=pm_msgs, lookup_fields=[ 'sender_id', ], input_field='message_id', output_field='unread_message_ids', ) stream_msgs = [ dict( stream_id=row['message__recipient__type_id'], topic=row['message__subject'], message_id=row['message_id'], ) for row in rows if row['message__recipient__type'] == Recipient.STREAM ] stream_objects = aggregate_dict( input_rows=stream_msgs, lookup_fields=[ 'stream_id', 'topic', ], input_field='message_id', output_field='unread_message_ids', ) huddle_msgs = [ dict( recipient_id=row['message__recipient_id'], message_id=row['message_id'], ) for row in rows if row['message__recipient__type'] == Recipient.HUDDLE ] huddle_objects = aggregate_dict( input_rows=huddle_msgs, lookup_fields=[ 'recipient_id', ], input_field='message_id', output_field='unread_message_ids', ) for huddle in huddle_objects: huddle['user_ids_string'] = huddle_users(huddle['recipient_id']) del huddle['recipient_id'] mentioned_message_ids = [ row['message_id'] for row in rows if (row['flags'] & UserMessage.flags.mentioned) != 0 ] result = dict( pms=pm_objects, streams=stream_objects, huddles=huddle_objects, mentions=mentioned_message_ids, ) return result